From 100a0516d43198b6421268b68dc65a61fe45c3e6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 25 Jun 2024 12:04:22 +0200 Subject: [PATCH 1/9] Add context type and value to path rewriting (#1525) ## Changes For a future change where the inner rewriting functions need access to the underlying bundle, this change makes preparations. All values were passed via the stack before and adding yet another value would make the code less readable. ## Tests Unit tests pass. --- bundle/config/mutator/translate_paths.go | 69 +++++++++++-------- .../mutator/translate_paths_artifacts.go | 28 +++++--- bundle/config/mutator/translate_paths_jobs.go | 63 +++++++++-------- .../mutator/translate_paths_pipelines.go | 38 ++++++---- 4 files changed, 115 insertions(+), 83 deletions(-) diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index d9ab9e9e..4224eafd 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -33,9 +33,7 @@ func (err ErrIsNotNotebook) Error() string { return fmt.Sprintf("file at %s is not a notebook", err.path) } -type translatePaths struct { - seen map[string]string -} +type translatePaths struct{} // TranslatePaths converts paths to local notebook files into paths in the workspace file system. func TranslatePaths() bundle.Mutator { @@ -48,6 +46,18 @@ func (m *translatePaths) Name() string { type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error) +// translateContext is a context for rewriting paths in a config. +// It is freshly instantiated on every mutator apply call. +// It provides access to the underlying bundle object such that +// it doesn't have to be passed around explicitly. +type translateContext struct { + b *bundle.Bundle + + // seen is a map of local paths to their corresponding remote paths. + // If a local path has already been successfully resolved, we do not need to resolve it again. + seen map[string]string +} + // rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function // // It takes these arguments: @@ -57,14 +67,13 @@ type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) ( // This logic is different between regular files or notebooks. // // The function returns an error if it is impossible to rewrite the given relative path. -func (m *translatePaths) rewritePath( +func (t *translateContext) rewritePath( dir string, - b *bundle.Bundle, p *string, fn rewriteFunc, ) error { // We assume absolute paths point to a location in the workspace - if path.IsAbs(filepath.ToSlash(*p)) { + if path.IsAbs(*p) { return nil } @@ -80,13 +89,14 @@ func (m *translatePaths) rewritePath( // Local path is relative to the directory the resource was defined in. localPath := filepath.Join(dir, filepath.FromSlash(*p)) - if interp, ok := m.seen[localPath]; ok { + if interp, ok := t.seen[localPath]; ok { *p = interp return nil } - // Remote path must be relative to the bundle root. - localRelPath, err := filepath.Rel(b.RootPath, localPath) + // Local path must be contained in the bundle root. + // If it isn't, it won't be synchronized into the workspace. + localRelPath, err := filepath.Rel(t.b.RootPath, localPath) if err != nil { return err } @@ -95,20 +105,20 @@ func (m *translatePaths) rewritePath( } // Prefix remote path with its remote root path. 
- remotePath := path.Join(b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath)) + remotePath := path.Join(t.b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath)) // Convert local path into workspace path via specified function. - interp, err := fn(*p, localPath, localRelPath, filepath.ToSlash(remotePath)) + interp, err := fn(*p, localPath, localRelPath, remotePath) if err != nil { return err } *p = interp - m.seen[localPath] = interp + t.seen[localPath] = interp return nil } -func translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { +func (t *translateContext) translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { nb, _, err := notebook.Detect(localFullPath) if errors.Is(err, fs.ErrNotExist) { return "", fmt.Errorf("notebook %s not found", literal) @@ -124,7 +134,7 @@ func translateNotebookPath(literal, localFullPath, localRelPath, remotePath stri return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil } -func translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) { +func (t *translateContext) translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) { nb, _, err := notebook.Detect(localFullPath) if errors.Is(err, fs.ErrNotExist) { return "", fmt.Errorf("file %s not found", literal) @@ -138,7 +148,7 @@ func translateFilePath(literal, localFullPath, localRelPath, remotePath string) return remotePath, nil } -func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { +func (t *translateContext) translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { info, err := os.Stat(localFullPath) if err != nil { return "", err @@ -149,20 +159,20 @@ func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath str return remotePath, nil } -func translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) { +func (t *translateContext) translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) { return localRelPath, nil } -func translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) { +func (t *translateContext) translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) { if !strings.HasPrefix(localRelPath, ".") { localRelPath = "." 
+ string(filepath.Separator) + localRelPath } return localRelPath, nil } -func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) { +func (t *translateContext) rewriteValue(p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) { out := v.MustString() - err := m.rewritePath(dir, b, &out, fn) + err := t.rewritePath(dir, &out, fn) if err != nil { if target := (&ErrIsNotebook{}); errors.As(err, target) { return dyn.InvalidValue, fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, p, target) @@ -176,15 +186,15 @@ func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value, return dyn.NewValue(out, v.Location()), nil } -func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) { - nv, err := m.rewriteValue(b, p, v, fn, dir) +func (t *translateContext) rewriteRelativeTo(p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) { + nv, err := t.rewriteValue(p, v, fn, dir) if err == nil { return nv, nil } // If we failed to rewrite the path, try to rewrite it relative to the fallback directory. if fallback != "" { - nv, nerr := m.rewriteValue(b, p, v, fn, fallback) + nv, nerr := t.rewriteValue(p, v, fn, fallback) if nerr == nil { // TODO: Emit a warning that this path should be rewritten. return nv, nil @@ -195,16 +205,19 @@ func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.V } func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { - m.seen = make(map[string]string) + t := &translateContext{ + b: b, + seen: make(map[string]string), + } err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { var err error - for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){ - m.applyJobTranslations, - m.applyPipelineTranslations, - m.applyArtifactTranslations, + for _, fn := range []func(dyn.Value) (dyn.Value, error){ + t.applyJobTranslations, + t.applyPipelineTranslations, + t.applyArtifactTranslations, } { - v, err = fn(b, v) + v, err = fn(v) if err != nil { return dyn.InvalidValue, err } diff --git a/bundle/config/mutator/translate_paths_artifacts.go b/bundle/config/mutator/translate_paths_artifacts.go index 7bda04ee..921c00c7 100644 --- a/bundle/config/mutator/translate_paths_artifacts.go +++ b/bundle/config/mutator/translate_paths_artifacts.go @@ -3,36 +3,42 @@ package mutator import ( "fmt" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/dyn" ) -func (m *translatePaths) applyArtifactTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) { - var err error +type artifactRewritePattern struct { + pattern dyn.Pattern + fn rewriteFunc +} +func (t *translateContext) artifactRewritePatterns() []artifactRewritePattern { // Base pattern to match all artifacts. base := dyn.NewPattern( dyn.Key("artifacts"), dyn.AnyKey(), ) - for _, t := range []struct { - pattern dyn.Pattern - fn rewriteFunc - }{ + // Compile list of configuration paths to rewrite. 
+ return []artifactRewritePattern{ { base.Append(dyn.Key("path")), - translateNoOp, + t.translateNoOp, }, - } { - v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + } +} + +func (t *translateContext) applyArtifactTranslations(v dyn.Value) (dyn.Value, error) { + var err error + + for _, rewritePattern := range t.artifactRewritePatterns() { + v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { key := p[1].Key() dir, err := v.Location().Directory() if err != nil { return dyn.InvalidValue, fmt.Errorf("unable to determine directory for artifact %s: %w", key, err) } - return m.rewriteRelativeTo(b, p, v, t.fn, dir, "") + return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, "") }) if err != nil { return dyn.InvalidValue, err diff --git a/bundle/config/mutator/translate_paths_jobs.go b/bundle/config/mutator/translate_paths_jobs.go index 58b5e0fb..60cc8bb9 100644 --- a/bundle/config/mutator/translate_paths_jobs.go +++ b/bundle/config/mutator/translate_paths_jobs.go @@ -4,7 +4,6 @@ import ( "fmt" "slices" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/libs/dyn" ) @@ -19,55 +18,42 @@ func noSkipRewrite(string) bool { return false } -func rewritePatterns(base dyn.Pattern) []jobRewritePattern { +func rewritePatterns(t *translateContext, base dyn.Pattern) []jobRewritePattern { return []jobRewritePattern{ { base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")), - translateNotebookPath, + t.translateNotebookPath, noSkipRewrite, }, { base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")), - translateFilePath, + t.translateFilePath, noSkipRewrite, }, { base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")), - translateDirectoryPath, + t.translateDirectoryPath, noSkipRewrite, }, { base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")), - translateFilePath, + t.translateFilePath, noSkipRewrite, }, { base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")), - translateNoOp, + t.translateNoOp, noSkipRewrite, }, { base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")), - translateNoOp, + t.translateNoOp, noSkipRewrite, }, } } -func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) { - fallback, err := gatherFallbackPaths(v, "jobs") - if err != nil { - return dyn.InvalidValue, err - } - - // Do not translate job task paths if using Git source - var ignore []string - for key, job := range b.Config.Resources.Jobs { - if job.GitSource != nil { - ignore = append(ignore, key) - } - } - +func (t *translateContext) jobRewritePatterns() []jobRewritePattern { // Base pattern to match all tasks in all jobs. base := dyn.NewPattern( dyn.Key("resources"), @@ -90,19 +76,38 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy dyn.Key("dependencies"), dyn.AnyIndex(), ), - translateNoOpWithPrefix, + t.translateNoOpWithPrefix, func(s string) bool { return !libraries.IsEnvironmentDependencyLocal(s) }, }, } - taskPatterns := rewritePatterns(base) - forEachPatterns := rewritePatterns(base.Append(dyn.Key("for_each_task"), dyn.Key("task"))) + + taskPatterns := rewritePatterns(t, base) + forEachPatterns := rewritePatterns(t, base.Append(dyn.Key("for_each_task"), dyn.Key("task"))) allPatterns := append(taskPatterns, jobEnvironmentsPatterns...) allPatterns = append(allPatterns, forEachPatterns...) 
+ return allPatterns +} - for _, t := range allPatterns { - v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { +func (t *translateContext) applyJobTranslations(v dyn.Value) (dyn.Value, error) { + var err error + + fallback, err := gatherFallbackPaths(v, "jobs") + if err != nil { + return dyn.InvalidValue, err + } + + // Do not translate job task paths if using Git source + var ignore []string + for key, job := range t.b.Config.Resources.Jobs { + if job.GitSource != nil { + ignore = append(ignore, key) + } + } + + for _, rewritePattern := range t.jobRewritePatterns() { + v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { key := p[2].Key() // Skip path translation if the job is using git source. @@ -116,10 +121,10 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy } sv := v.MustString() - if t.skipRewrite(sv) { + if rewritePattern.skipRewrite(sv) { return v, nil } - return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key]) + return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key]) }) if err != nil { return dyn.InvalidValue, err diff --git a/bundle/config/mutator/translate_paths_pipelines.go b/bundle/config/mutator/translate_paths_pipelines.go index 5b2a2c34..71a65e84 100644 --- a/bundle/config/mutator/translate_paths_pipelines.go +++ b/bundle/config/mutator/translate_paths_pipelines.go @@ -3,16 +3,15 @@ package mutator import ( "fmt" - "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/dyn" ) -func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) { - fallback, err := gatherFallbackPaths(v, "pipelines") - if err != nil { - return dyn.InvalidValue, err - } +type pipelineRewritePattern struct { + pattern dyn.Pattern + fn rewriteFunc +} +func (t *translateContext) pipelineRewritePatterns() []pipelineRewritePattern { // Base pattern to match all libraries in all pipelines. base := dyn.NewPattern( dyn.Key("resources"), @@ -22,27 +21,36 @@ func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value dyn.AnyIndex(), ) - for _, t := range []struct { - pattern dyn.Pattern - fn rewriteFunc - }{ + // Compile list of configuration paths to rewrite. 
+ return []pipelineRewritePattern{ { base.Append(dyn.Key("notebook"), dyn.Key("path")), - translateNotebookPath, + t.translateNotebookPath, }, { base.Append(dyn.Key("file"), dyn.Key("path")), - translateFilePath, + t.translateFilePath, }, - } { - v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + } +} + +func (t *translateContext) applyPipelineTranslations(v dyn.Value) (dyn.Value, error) { + var err error + + fallback, err := gatherFallbackPaths(v, "pipelines") + if err != nil { + return dyn.InvalidValue, err + } + + for _, rewritePattern := range t.pipelineRewritePatterns() { + v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { key := p[2].Key() dir, err := v.Location().Directory() if err != nil { return dyn.InvalidValue, fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) } - return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key]) + return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key]) }) if err != nil { return dyn.InvalidValue, err From 8468878eed293c836a7c8a52e69ca38f84417980 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Jun 2024 14:51:17 +0200 Subject: [PATCH 2/9] Bump github.com/databricks/databricks-sdk-go from 0.42.0 to 0.43.0 (#1522) Bumps [github.com/databricks/databricks-sdk-go](https://github.com/databricks/databricks-sdk-go) from 0.42.0 to 0.43.0.
Release notes

Sourced from github.com/databricks/databricks-sdk-go's releases.

v0.43.0

Major Changes and Improvements:

  • Support partners in user agent for SDK (#925).
  • Add serverless_compute_id field to the config (#952).
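
As a minimal sketch of what the second item above enables, assuming the new `serverless_compute_id` setting surfaces as a `ServerlessComputeID` field on `databricks.Config` (the Go spelling is not shown in the release note, and the host, token, and `"auto"` value below are placeholders):

```go
package main

import (
	"fmt"
	"log"

	"github.com/databricks/databricks-sdk-go"
)

func main() {
	// Configure a workspace client with the new serverless compute setting.
	// The field name ServerlessComputeID and the "auto" value are assumptions
	// inferred from the serverless_compute_id release note above; Host and
	// Token are placeholder credentials.
	w, err := databricks.NewWorkspaceClient(&databricks.Config{
		Host:                "https://example.cloud.databricks.com",
		Token:               "dapi-example",
		ServerlessComputeID: "auto",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("workspace host:", w.Config.Host)
}
```

The struct-literal form is simply the most direct way to show the new knob next to the usual authentication fields; whether the SDK also reads it from configuration profiles or environment variables is not covered by the release note.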

Other Changes:

  • Generate from latest spec (#944) and (#947).

API Changes:

OpenAPI SHA: 7437dabb9dadee402c1fc060df4c1ce8cc5369f0, Date: 2024-06-25

Most Recent Ignore Conditions Applied to This Pull Request

| Dependency Name | Ignore Conditions |
| --- | --- |
| github.com/databricks/databricks-sdk-go | [>= 0.28.a, < 0.29] |
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Pieter Noordhuis --- .codegen/_openapi_sha | 2 +- .codegen/cmds-account.go.tmpl | 4 +- .codegen/cmds-workspace.go.tmpl | 4 +- .codegen/service.go.tmpl | 1 + bundle/schema/docs/bundle_descriptions.json | 121 +++- cmd/workspace/alerts/alerts.go | 44 +- cmd/workspace/apps/apps.go | 59 ++ cmd/workspace/catalogs/catalogs.go | 2 + cmd/workspace/dashboards/dashboards.go | 4 +- cmd/workspace/data-sources/data-sources.go | 14 +- .../external-locations/external-locations.go | 1 + cmd/workspace/functions/functions.go | 2 + cmd/workspace/jobs/jobs.go | 11 +- cmd/workspace/lakeview/lakeview.go | 653 ++++++++++++++++++ cmd/workspace/queries/queries.go | 51 +- .../storage-credentials.go | 1 + .../vector-search-indexes.go | 72 ++ go.mod | 2 +- go.sum | 4 +- 19 files changed, 1005 insertions(+), 47 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index de0f45ab..c4b47ca1 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -37b925eba37dfb3d7e05b6ba2d458454ce62d3a0 \ No newline at end of file +7437dabb9dadee402c1fc060df4c1ce8cc5369f0 \ No newline at end of file diff --git a/.codegen/cmds-account.go.tmpl b/.codegen/cmds-account.go.tmpl index 24b6bdd7..43834b69 100644 --- a/.codegen/cmds-account.go.tmpl +++ b/.codegen/cmds-account.go.tmpl @@ -7,7 +7,7 @@ package account import ( "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" - {{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) }} + {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }} {{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}} ) @@ -17,7 +17,7 @@ func New() *cobra.Command { Short: `Databricks Account Commands`, } - {{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) -}} + {{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}} cmd.AddCommand({{.SnakeName}}.New()) {{end}}{{end}}{{end}} diff --git a/.codegen/cmds-workspace.go.tmpl b/.codegen/cmds-workspace.go.tmpl index 244dde61..e29f05a5 100644 --- a/.codegen/cmds-workspace.go.tmpl +++ b/.codegen/cmds-workspace.go.tmpl @@ -14,14 +14,14 @@ package workspace import ( "github.com/databricks/cli/cmd/root" - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) }} + {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }} {{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}} ) func All() []*cobra.Command { var out []*cobra.Command - {{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) -}} + {{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}} out = append(out, {{.SnakeName}}.New()) {{end}}{{end}}{{end}} diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index ad482ebe..111745e4 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -22,6 +22,7 @@ import ( "dbsql-permissions" "account-access-control-proxy" "files" + "serving-endpoints-data-plane" }} {{if not (in $excludes .KebabName) }} diff --git a/bundle/schema/docs/bundle_descriptions.json 
b/bundle/schema/docs/bundle_descriptions.json index ab948b8b..380be054 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -79,6 +79,17 @@ "experimental": { "description": "", "properties": { + "pydabs": { + "description": "", + "properties": { + "enabled": { + "description": "" + }, + "venv_path": { + "description": "" + } + } + }, "python_wheel_wrapper": { "description": "" }, @@ -236,6 +247,12 @@ "description": "" } }, + "on_streaming_backlog_exceeded": { + "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", + "items": { + "description": "" + } + }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "items": { @@ -853,6 +870,12 @@ "description": "" } }, + "on_streaming_backlog_exceeded": { + "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", + "items": { + "description": "" + } + }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "items": { @@ -1595,6 +1618,17 @@ } } }, + "on_streaming_backlog_exceeded": { + "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", + "items": { + "description": "", + "properties": { + "id": { + "description": "" + } + } + } + }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "items": { @@ -1634,6 +1668,17 @@ "pause_status": { "description": "Whether this trigger is paused or not." }, + "periodic": { + "description": "Periodic trigger settings.", + "properties": { + "interval": { + "description": "The interval at which the trigger should run." 
+ }, + "unit": { + "description": "The unit of time for the interval." + } + } + }, "table": { "description": "Old table trigger settings name. Deprecated in favor of `table_update`.", "properties": { @@ -1712,6 +1757,17 @@ } } }, + "on_streaming_backlog_exceeded": { + "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", + "items": { + "description": "", + "properties": { + "id": { + "description": "" + } + } + } + }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "items": { @@ -1740,16 +1796,16 @@ "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.", "properties": { "catalog_name": { - "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set." + "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled." }, "enabled": { - "description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again." + "description": "Indicates whether the inference table is enabled." }, "schema_name": { - "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set." + "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled." }, "table_name_prefix": { - "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set." + "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled." } } }, @@ -2623,7 +2679,7 @@ } }, "notebook": { - "description": "The path to a notebook that defines a pipeline and is stored in the \u003cDatabricks\u003e workspace.\n", + "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n", "properties": { "path": { "description": "The absolute path of the notebook." @@ -3167,6 +3223,12 @@ "description": "" } }, + "on_streaming_backlog_exceeded": { + "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", + "items": { + "description": "" + } + }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. 
A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "items": { @@ -3784,6 +3846,12 @@ "description": "" } }, + "on_streaming_backlog_exceeded": { + "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.", + "items": { + "description": "" + } + }, "on_success": { "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.", "items": { @@ -4526,6 +4594,17 @@ } } }, + "on_streaming_backlog_exceeded": { + "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", + "items": { + "description": "", + "properties": { + "id": { + "description": "" + } + } + } + }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.", "items": { @@ -4565,6 +4644,17 @@ "pause_status": { "description": "Whether this trigger is paused or not." }, + "periodic": { + "description": "Periodic trigger settings.", + "properties": { + "interval": { + "description": "The interval at which the trigger should run." + }, + "unit": { + "description": "The unit of time for the interval." + } + } + }, "table": { "description": "Old table trigger settings name. Deprecated in favor of `table_update`.", "properties": { @@ -4643,6 +4733,17 @@ } } }, + "on_streaming_backlog_exceeded": { + "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.", + "items": { + "description": "", + "properties": { + "id": { + "description": "" + } + } + } + }, "on_success": { "description": "An optional list of system notification IDs to call when the run completes successfully. 
A maximum of 3 destinations can be specified for the `on_success` property.", "items": { @@ -4671,16 +4772,16 @@ "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.", "properties": { "catalog_name": { - "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set." + "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled." }, "enabled": { - "description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again." + "description": "Indicates whether the inference table is enabled." }, "schema_name": { - "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set." + "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled." }, "table_name_prefix": { - "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set." + "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled." } } }, @@ -5554,7 +5655,7 @@ } }, "notebook": { - "description": "The path to a notebook that defines a pipeline and is stored in the \u003cDatabricks\u003e workspace.\n", + "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n", "properties": { "path": { "description": "The absolute path of the notebook." diff --git a/cmd/workspace/alerts/alerts.go b/cmd/workspace/alerts/alerts.go index d4a7d02a..61c1e0ea 100755 --- a/cmd/workspace/alerts/alerts.go +++ b/cmd/workspace/alerts/alerts.go @@ -24,7 +24,12 @@ func New() *cobra.Command { Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the sql_task type of - the Jobs API, e.g. :method:jobs/create.`, + the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`, GroupID: "sql", Annotations: map[string]string{ "package": "sql", @@ -73,7 +78,12 @@ func newCreate() *cobra.Command { Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification - destinations if the condition was met.` + destinations if the condition was met. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -131,8 +141,13 @@ func newDelete() *cobra.Command { cmd.Long = `Delete an alert. Deletes an alert. Deleted alerts are no longer accessible and cannot be - restored. **Note:** Unlike queries and dashboards, alerts cannot be moved to - the trash.` + restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to + the trash. 
+ + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -199,7 +214,12 @@ func newGet() *cobra.Command { cmd.Short = `Get an alert.` cmd.Long = `Get an alert. - Gets an alert.` + Gets an alert. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -261,7 +281,12 @@ func newList() *cobra.Command { cmd.Short = `Get alerts.` cmd.Long = `Get alerts. - Gets a list of alerts.` + Gets a list of alerts. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -312,7 +337,12 @@ func newUpdate() *cobra.Command { cmd.Short = `Update an alert.` cmd.Long = `Update an alert. - Updates an alert.` + Updates an alert. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/apps/apps.go b/cmd/workspace/apps/apps.go index 46568e52..1572d4f4 100755 --- a/cmd/workspace/apps/apps.go +++ b/cmd/workspace/apps/apps.go @@ -42,6 +42,7 @@ func New() *cobra.Command { cmd.AddCommand(newGetEnvironment()) cmd.AddCommand(newList()) cmd.AddCommand(newListDeployments()) + cmd.AddCommand(newStart()) cmd.AddCommand(newStop()) cmd.AddCommand(newUpdate()) @@ -615,6 +616,64 @@ func newListDeployments() *cobra.Command { return cmd } +// start start command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var startOverrides []func( + *cobra.Command, + *serving.StartAppRequest, +) + +func newStart() *cobra.Command { + cmd := &cobra.Command{} + + var startReq serving.StartAppRequest + + // TODO: short flags + + cmd.Use = "start NAME" + cmd.Short = `Start an app.` + cmd.Long = `Start an app. + + Start the last active deployment of the app in the workspace. + + Arguments: + NAME: The name of the app.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + startReq.Name = args[0] + + response, err := w.Apps.Start(ctx, startReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range startOverrides { + fn(cmd, &startReq) + } + + return cmd +} + // start stop command // Slice with functions to override default command behavior. 
diff --git a/cmd/workspace/catalogs/catalogs.go b/cmd/workspace/catalogs/catalogs.go index 8085b69e..a17bb007 100755 --- a/cmd/workspace/catalogs/catalogs.go +++ b/cmd/workspace/catalogs/catalogs.go @@ -273,6 +273,8 @@ func newList() *cobra.Command { // TODO: short flags cmd.Flags().BoolVar(&listReq.IncludeBrowse, "include-browse", listReq.IncludeBrowse, `Whether to include catalogs in the response for which the principal can only access selective metadata for.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of catalogs to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) cmd.Use = "list" cmd.Short = `List catalogs.` diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index 1a143538..fcab0aa2 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -268,8 +268,8 @@ func newList() *cobra.Command { Fetch a paginated list of dashboard objects. - ### **Warning: Calling this API concurrently 10 or more times could result in - throttling, service degradation, or a temporary ban.**` + **Warning**: Calling this API concurrently 10 or more times could result in + throttling, service degradation, or a temporary ban.` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/data-sources/data-sources.go b/cmd/workspace/data-sources/data-sources.go index 0f0f8541..f310fe50 100755 --- a/cmd/workspace/data-sources/data-sources.go +++ b/cmd/workspace/data-sources/data-sources.go @@ -25,7 +25,12 @@ func New() *cobra.Command { This API does not support searches. It returns the full list of SQL warehouses in your workspace. We advise you to use any text editor, REST client, or grep to search the response from this API for the name of your SQL warehouse - as it appears in Databricks SQL.`, + as it appears in Databricks SQL. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`, GroupID: "sql", Annotations: map[string]string{ "package": "sql", @@ -60,7 +65,12 @@ func newList() *cobra.Command { Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. However, - you need only a SQL warehouse's id to create new queries against it.` + you need only a SQL warehouse's id to create new queries against it. + + **Note**: A new version of the Databricks SQL API will soon be available. 
+ [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/external-locations/external-locations.go b/cmd/workspace/external-locations/external-locations.go index bd63d3fa..8f0dd346 100755 --- a/cmd/workspace/external-locations/external-locations.go +++ b/cmd/workspace/external-locations/external-locations.go @@ -348,6 +348,7 @@ func newUpdate() *cobra.Command { cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) // TODO: complex arg: encryption_details cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) + cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the external location.`) cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) diff --git a/cmd/workspace/functions/functions.go b/cmd/workspace/functions/functions.go index 1aa6daf3..c8de4879 100755 --- a/cmd/workspace/functions/functions.go +++ b/cmd/workspace/functions/functions.go @@ -69,6 +69,8 @@ func newCreate() *cobra.Command { cmd.Short = `Create a function.` cmd.Long = `Create a function. 
+ **WARNING: This API is experimental and will change in future versions** + Creates a new function The user must have the following permissions in order for the function to be diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index e31c3f08..50a04592 100755 --- a/cmd/workspace/jobs/jobs.go +++ b/cmd/workspace/jobs/jobs.go @@ -1502,24 +1502,15 @@ func newSubmit() *cobra.Command { cmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list - // TODO: complex arg: condition_task - // TODO: complex arg: dbt_task // TODO: complex arg: email_notifications + // TODO: array: environments // TODO: complex arg: git_source // TODO: complex arg: health cmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`) - // TODO: complex arg: notebook_task // TODO: complex arg: notification_settings - // TODO: complex arg: pipeline_task - // TODO: complex arg: python_wheel_task // TODO: complex arg: queue // TODO: complex arg: run_as - // TODO: complex arg: run_job_task cmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) - // TODO: complex arg: spark_jar_task - // TODO: complex arg: spark_python_task - // TODO: complex arg: spark_submit_task - // TODO: complex arg: sql_task // TODO: array: tasks cmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) // TODO: complex arg: webhook_notifications diff --git a/cmd/workspace/lakeview/lakeview.go b/cmd/workspace/lakeview/lakeview.go index 566853ff..36eab0e7 100755 --- a/cmd/workspace/lakeview/lakeview.go +++ b/cmd/workspace/lakeview/lakeview.go @@ -31,13 +31,23 @@ func New() *cobra.Command { // Add methods cmd.AddCommand(newCreate()) + cmd.AddCommand(newCreateSchedule()) + cmd.AddCommand(newCreateSubscription()) + cmd.AddCommand(newDeleteSchedule()) + cmd.AddCommand(newDeleteSubscription()) cmd.AddCommand(newGet()) cmd.AddCommand(newGetPublished()) + cmd.AddCommand(newGetSchedule()) + cmd.AddCommand(newGetSubscription()) + cmd.AddCommand(newList()) + cmd.AddCommand(newListSchedules()) + cmd.AddCommand(newListSubscriptions()) cmd.AddCommand(newMigrate()) cmd.AddCommand(newPublish()) cmd.AddCommand(newTrash()) cmd.AddCommand(newUnpublish()) cmd.AddCommand(newUpdate()) + cmd.AddCommand(newUpdateSchedule()) // Apply optional overrides to this command. for _, fn := range cmdOverrides { @@ -126,6 +136,277 @@ func newCreate() *cobra.Command { return cmd } +// start create-schedule command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createScheduleOverrides []func( + *cobra.Command, + *dashboards.CreateScheduleRequest, +) + +func newCreateSchedule() *cobra.Command { + cmd := &cobra.Command{} + + var createScheduleReq dashboards.CreateScheduleRequest + var createScheduleJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createScheduleJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createScheduleReq.DisplayName, "display-name", createScheduleReq.DisplayName, `The display name for schedule.`) + cmd.Flags().Var(&createScheduleReq.PauseStatus, "pause-status", `The status indicates whether this schedule is paused or not. Supported values: [PAUSED, UNPAUSED]`) + + cmd.Use = "create-schedule DASHBOARD_ID" + cmd.Short = `Create dashboard schedule.` + cmd.Long = `Create dashboard schedule. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createScheduleJson.Unmarshal(&createScheduleReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + createScheduleReq.DashboardId = args[0] + + response, err := w.Lakeview.CreateSchedule(ctx, createScheduleReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createScheduleOverrides { + fn(cmd, &createScheduleReq) + } + + return cmd +} + +// start create-subscription command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createSubscriptionOverrides []func( + *cobra.Command, + *dashboards.CreateSubscriptionRequest, +) + +func newCreateSubscription() *cobra.Command { + cmd := &cobra.Command{} + + var createSubscriptionReq dashboards.CreateSubscriptionRequest + var createSubscriptionJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createSubscriptionJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "create-subscription DASHBOARD_ID SCHEDULE_ID" + cmd.Short = `Create schedule subscription.` + cmd.Long = `Create schedule subscription. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs. + SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.` + + // This command is being previewed; hide from help output. 
+ cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createSubscriptionJson.Unmarshal(&createSubscriptionReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + createSubscriptionReq.DashboardId = args[0] + createSubscriptionReq.ScheduleId = args[1] + + response, err := w.Lakeview.CreateSubscription(ctx, createSubscriptionReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createSubscriptionOverrides { + fn(cmd, &createSubscriptionReq) + } + + return cmd +} + +// start delete-schedule command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteScheduleOverrides []func( + *cobra.Command, + *dashboards.DeleteScheduleRequest, +) + +func newDeleteSchedule() *cobra.Command { + cmd := &cobra.Command{} + + var deleteScheduleReq dashboards.DeleteScheduleRequest + + // TODO: short flags + + cmd.Flags().StringVar(&deleteScheduleReq.Etag, "etag", deleteScheduleReq.Etag, `The etag for the schedule.`) + + cmd.Use = "delete-schedule DASHBOARD_ID SCHEDULE_ID" + cmd.Short = `Delete dashboard schedule.` + cmd.Long = `Delete dashboard schedule. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs. + SCHEDULE_ID: UUID identifying the schedule.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteScheduleReq.DashboardId = args[0] + deleteScheduleReq.ScheduleId = args[1] + + err = w.Lakeview.DeleteSchedule(ctx, deleteScheduleReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteScheduleOverrides { + fn(cmd, &deleteScheduleReq) + } + + return cmd +} + +// start delete-subscription command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteSubscriptionOverrides []func( + *cobra.Command, + *dashboards.DeleteSubscriptionRequest, +) + +func newDeleteSubscription() *cobra.Command { + cmd := &cobra.Command{} + + var deleteSubscriptionReq dashboards.DeleteSubscriptionRequest + + // TODO: short flags + + cmd.Flags().StringVar(&deleteSubscriptionReq.Etag, "etag", deleteSubscriptionReq.Etag, `The etag for the subscription.`) + + cmd.Use = "delete-subscription DASHBOARD_ID SCHEDULE_ID SUBSCRIPTION_ID" + cmd.Short = `Delete schedule subscription.` + cmd.Long = `Delete schedule subscription. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard which the subscription belongs. + SCHEDULE_ID: UUID identifying the schedule which the subscription belongs. + SUBSCRIPTION_ID: UUID identifying the subscription.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteSubscriptionReq.DashboardId = args[0] + deleteSubscriptionReq.ScheduleId = args[1] + deleteSubscriptionReq.SubscriptionId = args[2] + + err = w.Lakeview.DeleteSubscription(ctx, deleteSubscriptionReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteSubscriptionOverrides { + fn(cmd, &deleteSubscriptionReq) + } + + return cmd +} + // start get command // Slice with functions to override default command behavior. @@ -242,6 +523,303 @@ func newGetPublished() *cobra.Command { return cmd } +// start get-schedule command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getScheduleOverrides []func( + *cobra.Command, + *dashboards.GetScheduleRequest, +) + +func newGetSchedule() *cobra.Command { + cmd := &cobra.Command{} + + var getScheduleReq dashboards.GetScheduleRequest + + // TODO: short flags + + cmd.Use = "get-schedule DASHBOARD_ID SCHEDULE_ID" + cmd.Short = `Get dashboard schedule.` + cmd.Long = `Get dashboard schedule. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs. + SCHEDULE_ID: UUID identifying the schedule.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getScheduleReq.DashboardId = args[0] + getScheduleReq.ScheduleId = args[1] + + response, err := w.Lakeview.GetSchedule(ctx, getScheduleReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+	for _, fn := range getScheduleOverrides {
+		fn(cmd, &getScheduleReq)
+	}
+
+	return cmd
+}
+
+// start get-subscription command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var getSubscriptionOverrides []func(
+	*cobra.Command,
+	*dashboards.GetSubscriptionRequest,
+)
+
+func newGetSubscription() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var getSubscriptionReq dashboards.GetSubscriptionRequest
+
+	// TODO: short flags
+
+	cmd.Use = "get-subscription DASHBOARD_ID SCHEDULE_ID SUBSCRIPTION_ID"
+	cmd.Short = `Get schedule subscription.`
+	cmd.Long = `Get schedule subscription.
+
+  Arguments:
+    DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs.
+    SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.
+    SUBSCRIPTION_ID: UUID identifying the subscription.`
+
+	// This command is being previewed; hide from help output.
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(3)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		getSubscriptionReq.DashboardId = args[0]
+		getSubscriptionReq.ScheduleId = args[1]
+		getSubscriptionReq.SubscriptionId = args[2]
+
+		response, err := w.Lakeview.GetSubscription(ctx, getSubscriptionReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range getSubscriptionOverrides {
+		fn(cmd, &getSubscriptionReq)
+	}
+
+	return cmd
+}
+
+// start list command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var listOverrides []func(
+	*cobra.Command,
+	*dashboards.ListDashboardsRequest,
+)
+
+func newList() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var listReq dashboards.ListDashboardsRequest
+
+	// TODO: short flags
+
+	cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The number of dashboards to return per page.`)
+	cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A page token, received from a previous ListDashboards call.`)
+	cmd.Flags().BoolVar(&listReq.ShowTrashed, "show-trashed", listReq.ShowTrashed, `The flag to include dashboards located in the trash.`)
+	cmd.Flags().Var(&listReq.View, "view", `Indicates whether to include all metadata from the dashboard in the response. Supported values: [DASHBOARD_VIEW_BASIC, DASHBOARD_VIEW_FULL]`)
+
+	cmd.Use = "list"
+	cmd.Short = `List dashboards.`
+	cmd.Long = `List dashboards.`
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(0)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		response := w.Lakeview.List(ctx, listReq)
+		return cmdio.RenderIterator(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+ // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start list-schedules command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSchedulesOverrides []func( + *cobra.Command, + *dashboards.ListSchedulesRequest, +) + +func newListSchedules() *cobra.Command { + cmd := &cobra.Command{} + + var listSchedulesReq dashboards.ListSchedulesRequest + + // TODO: short flags + + cmd.Flags().IntVar(&listSchedulesReq.PageSize, "page-size", listSchedulesReq.PageSize, `The number of schedules to return per page.`) + cmd.Flags().StringVar(&listSchedulesReq.PageToken, "page-token", listSchedulesReq.PageToken, `A page token, received from a previous ListSchedules call.`) + + cmd.Use = "list-schedules DASHBOARD_ID" + cmd.Short = `List dashboard schedules.` + cmd.Long = `List dashboard schedules. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.` + + // This command is being previewed; hide from help output. + cmd.Hidden = true + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + listSchedulesReq.DashboardId = args[0] + + response := w.Lakeview.ListSchedules(ctx, listSchedulesReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSchedulesOverrides { + fn(cmd, &listSchedulesReq) + } + + return cmd +} + +// start list-subscriptions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSubscriptionsOverrides []func( + *cobra.Command, + *dashboards.ListSubscriptionsRequest, +) + +func newListSubscriptions() *cobra.Command { + cmd := &cobra.Command{} + + var listSubscriptionsReq dashboards.ListSubscriptionsRequest + + // TODO: short flags + + cmd.Flags().IntVar(&listSubscriptionsReq.PageSize, "page-size", listSubscriptionsReq.PageSize, `The number of subscriptions to return per page.`) + cmd.Flags().StringVar(&listSubscriptionsReq.PageToken, "page-token", listSubscriptionsReq.PageToken, `A page token, received from a previous ListSubscriptions call.`) + + cmd.Use = "list-subscriptions DASHBOARD_ID SCHEDULE_ID" + cmd.Short = `List schedule subscriptions.` + cmd.Long = `List schedule subscriptions. + + Arguments: + DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs. + SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.` + + // This command is being previewed; hide from help output. 
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(2)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		listSubscriptionsReq.DashboardId = args[0]
+		listSubscriptionsReq.ScheduleId = args[1]
+
+		response := w.Lakeview.ListSubscriptions(ctx, listSubscriptionsReq)
+		return cmdio.RenderIterator(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range listSubscriptionsOverrides {
+		fn(cmd, &listSubscriptionsReq)
+	}
+
+	return cmd
+}
+
 // start migrate command

 // Slice with functions to override default command behavior.
@@ -576,4 +1154,79 @@ func newUpdate() *cobra.Command {
 	return cmd
 }

+// start update-schedule command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var updateScheduleOverrides []func(
+	*cobra.Command,
+	*dashboards.UpdateScheduleRequest,
+)
+
+func newUpdateSchedule() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var updateScheduleReq dashboards.UpdateScheduleRequest
+	var updateScheduleJson flags.JsonFlag
+
+	// TODO: short flags
+	cmd.Flags().Var(&updateScheduleJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	cmd.Flags().StringVar(&updateScheduleReq.DisplayName, "display-name", updateScheduleReq.DisplayName, `The display name for the schedule.`)
+	cmd.Flags().StringVar(&updateScheduleReq.Etag, "etag", updateScheduleReq.Etag, `The etag for the schedule.`)
+	cmd.Flags().Var(&updateScheduleReq.PauseStatus, "pause-status", `The status indicates whether this schedule is paused or not. Supported values: [PAUSED, UNPAUSED]`)
+
+	cmd.Use = "update-schedule DASHBOARD_ID SCHEDULE_ID"
+	cmd.Short = `Update dashboard schedule.`
+	cmd.Long = `Update dashboard schedule.
+
+  Arguments:
+    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.
+    SCHEDULE_ID: UUID identifying the schedule.`
+
+	// This command is being previewed; hide from help output.
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(2)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			err = updateScheduleJson.Unmarshal(&updateScheduleReq)
+			if err != nil {
+				return err
+			}
+		} else {
+			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
+		}
+		updateScheduleReq.DashboardId = args[0]
+		updateScheduleReq.ScheduleId = args[1]
+
+		response, err := w.Lakeview.UpdateSchedule(ctx, updateScheduleReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+ for _, fn := range updateScheduleOverrides { + fn(cmd, &updateScheduleReq) + } + + return cmd +} + // end service Lakeview diff --git a/cmd/workspace/queries/queries.go b/cmd/workspace/queries/queries.go index b96eb715..65013197 100755 --- a/cmd/workspace/queries/queries.go +++ b/cmd/workspace/queries/queries.go @@ -23,7 +23,12 @@ func New() *cobra.Command { Long: `These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the - sql_task type of the Jobs API, e.g. :method:jobs/create.`, + sql_task type of the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`, GroupID: "sql", Annotations: map[string]string{ "package": "sql", @@ -76,7 +81,12 @@ func newCreate() *cobra.Command { available SQL warehouses. Or you can copy the data_source_id from an existing query. - **Note**: You cannot add a visualization until you create the query.` + **Note**: You cannot add a visualization until you create the query. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -135,7 +145,12 @@ func newDelete() *cobra.Command { Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is - deleted after 30 days.` + deleted after 30 days. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -203,7 +218,12 @@ func newGet() *cobra.Command { cmd.Long = `Get a query definition. Retrieve a query object definition along with contextual permissions - information about the currently authenticated user.` + information about the currently authenticated user. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -278,8 +298,13 @@ func newList() *cobra.Command { Gets a list of queries. Optionally, this list can be filtered by a search term. - ### **Warning: Calling this API concurrently 10 or more times could result in - throttling, service degradation, or a temporary ban.**` + **Warning**: Calling this API concurrently 10 or more times could result in + throttling, service degradation, or a temporary ban. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -330,7 +355,12 @@ func newRestore() *cobra.Command { cmd.Long = `Restore a query. Restore a query that has been moved to the trash. A restored query appears in - list views and searches. 
You can use restored queries for alerts.` + list views and searches. You can use restored queries for alerts. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) @@ -409,7 +439,12 @@ func newUpdate() *cobra.Command { Modify this query definition. - **Note**: You cannot undo this operation.` + **Note**: You cannot undo this operation. + + **Note**: A new version of the Databricks SQL API will soon be available. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources` cmd.Annotations = make(map[string]string) diff --git a/cmd/workspace/storage-credentials/storage-credentials.go b/cmd/workspace/storage-credentials/storage-credentials.go index 32594503..18656a61 100755 --- a/cmd/workspace/storage-credentials/storage-credentials.go +++ b/cmd/workspace/storage-credentials/storage-credentials.go @@ -366,6 +366,7 @@ func newUpdate() *cobra.Command { cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`) // TODO: complex arg: databricks_gcp_service_account cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`) + cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the storage credential.`) cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`) cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) diff --git a/cmd/workspace/vector-search-indexes/vector-search-indexes.go b/cmd/workspace/vector-search-indexes/vector-search-indexes.go index dff8176e..15847477 100755 --- a/cmd/workspace/vector-search-indexes/vector-search-indexes.go +++ b/cmd/workspace/vector-search-indexes/vector-search-indexes.go @@ -42,6 +42,7 @@ func New() *cobra.Command { cmd.AddCommand(newGetIndex()) cmd.AddCommand(newListIndexes()) cmd.AddCommand(newQueryIndex()) + cmd.AddCommand(newQueryNextPage()) cmd.AddCommand(newScanIndex()) cmd.AddCommand(newSyncIndex()) cmd.AddCommand(newUpsertDataVectorIndex()) @@ -416,6 +417,7 @@ func newQueryIndex() *cobra.Command { cmd.Flags().StringVar(&queryIndexReq.FiltersJson, "filters-json", queryIndexReq.FiltersJson, `JSON string representing query filters.`) cmd.Flags().IntVar(&queryIndexReq.NumResults, "num-results", queryIndexReq.NumResults, `Number of results to return.`) cmd.Flags().StringVar(&queryIndexReq.QueryText, "query-text", queryIndexReq.QueryText, `Query text.`) + cmd.Flags().StringVar(&queryIndexReq.QueryType, "query-type", queryIndexReq.QueryType, `The query type to use.`) // TODO: array: query_vector cmd.Flags().Float64Var(&queryIndexReq.ScoreThreshold, "score-threshold", queryIndexReq.ScoreThreshold, `Threshold for the approximate nearest neighbor search.`) @@ -469,6 +471,76 @@ func newQueryIndex() *cobra.Command { return cmd } +// start query-next-page command + +// Slice with functions to override 
default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var queryNextPageOverrides []func( + *cobra.Command, + *vectorsearch.QueryVectorIndexNextPageRequest, +) + +func newQueryNextPage() *cobra.Command { + cmd := &cobra.Command{} + + var queryNextPageReq vectorsearch.QueryVectorIndexNextPageRequest + var queryNextPageJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&queryNextPageJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&queryNextPageReq.EndpointName, "endpoint-name", queryNextPageReq.EndpointName, `Name of the endpoint.`) + cmd.Flags().StringVar(&queryNextPageReq.PageToken, "page-token", queryNextPageReq.PageToken, `Page token returned from previous QueryVectorIndex or QueryVectorIndexNextPage API.`) + + cmd.Use = "query-next-page INDEX_NAME" + cmd.Short = `Query next page.` + cmd.Long = `Query next page. + + Use next_page_token returned from previous QueryVectorIndex or + QueryVectorIndexNextPage request to fetch next page of results. + + Arguments: + INDEX_NAME: Name of the vector index to query.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = queryNextPageJson.Unmarshal(&queryNextPageReq) + if err != nil { + return err + } + } + queryNextPageReq.IndexName = args[0] + + response, err := w.VectorSearchIndexes.QueryNextPage(ctx, queryNextPageReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range queryNextPageOverrides { + fn(cmd, &queryNextPageReq) + } + + return cmd +} + // start scan-index command // Slice with functions to override default command behavior. 
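As an aside, the new `query-next-page` command wraps a pagination pair in the Go SDK: `QueryIndex` returns the first page and `QueryNextPage` fetches subsequent ones. A minimal sketch of how a client might drive this directly; the request and response field names `Columns`, `NumResults`, `NextPageToken`, and `Result.DataArray`, as well as the index name, are assumptions inferred from the help text above rather than verified against the SDK:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/vectorsearch"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	const index = "main.default.my_index" // hypothetical index name

	// Fetch the first page via QueryIndex.
	page, err := w.VectorSearchIndexes.QueryIndex(ctx, vectorsearch.QueryVectorIndexRequest{
		IndexName:  index,
		Columns:    []string{"id", "text"}, // assumed column names
		QueryText:  "kafka ingestion",
		NumResults: 10,
	})
	for err == nil {
		fmt.Printf("got %d rows\n", len(page.Result.DataArray)) // Result.DataArray is assumed
		if page.NextPageToken == "" {
			break
		}
		// Fetch subsequent pages via QueryNextPage, keyed by the returned token.
		page, err = w.VectorSearchIndexes.QueryNextPage(ctx, vectorsearch.QueryVectorIndexNextPageRequest{
			IndexName: index,
			PageToken: page.NextPageToken,
		})
	}
	if err != nil {
		panic(err)
	}
}
```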
diff --git a/go.mod b/go.mod
index bcfbae47..2dfbf46c 100644
--- a/go.mod
+++ b/go.mod
@@ -5,7 +5,7 @@ go 1.21
 require (
 	github.com/Masterminds/semver/v3 v3.2.1 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.42.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.43.0 // Apache 2.0
 	github.com/fatih/color v1.17.0 // MIT
 	github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
diff --git a/go.sum b/go.sum
index 0f4f62d9..864b7919 100644
--- a/go.sum
+++ b/go.sum
@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
 github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.42.0 h1:WKdoqnvb+jvsR9+IYkC3P4BH5eJHRzVOr59y3mCoY+s=
-github.com/databricks/databricks-sdk-go v0.42.0/go.mod h1:a9rr0FOHLL26kOjQjZZVFjIYmRABCbrAWVeundDEVG8=
+github.com/databricks/databricks-sdk-go v0.43.0 h1:x4laolWhYlsQg2t8yWEGyRPZy4/Wv3pKnLEoJfVin7I=
+github.com/databricks/databricks-sdk-go v0.43.0/go.mod h1:a9rr0FOHLL26kOjQjZZVFjIYmRABCbrAWVeundDEVG8=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

From dac5f09556875003986832f74829bdbc326e725f Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Tue, 25 Jun 2024 19:10:21 +0530
Subject: [PATCH 3/9] Retain location metadata for values in `convert.FromTyped` (#1523)

## Changes
There are four different treatments location metadata can receive in the `convert.FromTyped` method.

1. Location metadata is **retained** for maps, structs and slices if the value is **not nil**.
2. Location metadata is **lost** for maps, structs and slices if the value **is nil**.
3. Location metadata is **retained** if a scalar type (e.g. bool, string, etc.) does not change.
4. Location metadata is **lost** if the value for a scalar type changes.

This PR ensures that location metadata is not lost in any case; that is, it's always preserved.

For (2), this serves as a bug fix so that location information is not lost on conversion to and from typed for nil values of complex types (struct, slices, and maps).

For (4), this is a change in semantics. For primitive values modified in a `typed` mutator, any references to `.Location()` for computed primitive fields will now return the associated YAML location metadata (if any) instead of an empty location. While arguable, these semantics are OK since:
1. Situations like these will be rare.
2. Knowing the YAML location (if any) is better than not knowing the location at all. These locations are typically visible to the user in errors and warnings.
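To make the new semantics of case (4) concrete, here is a minimal sketch in the style of the unit tests below. The `FromTyped` and `dyn.NewValue` signatures are taken from the diff; the module path `github.com/databricks/cli` is assumed:

```go
package convert_test

import (
	"testing"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

// Case (4): the typed scalar differs from the reference value.
// Previously the output carried an empty location; with this change
// the reference's YAML location is preserved.
func TestScalarChangeKeepsLocationSketch(t *testing.T) {
	// Reference value as loaded from YAML, with location metadata.
	ref := dyn.NewValue("foo", dyn.Location{File: "databricks.yml"})

	// The typed value was modified by a mutator.
	nv, err := convert.FromTyped("bar", ref)
	require.NoError(t, err)

	// Before this change: dyn.NewValue("bar", dyn.Location{}).
	// After: the location from ref is retained.
	assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "databricks.yml"}), nv)
}
```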
## Tests Unit tests --- libs/dyn/convert/from_typed.go | 34 +++++---- libs/dyn/convert/from_typed_test.go | 109 +++++++++++++++++++++------- 2 files changed, 105 insertions(+), 38 deletions(-) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index af49a07a..258ade4e 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -42,7 +42,7 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, // Dereference pointer if necessary for srcv.Kind() == reflect.Pointer { if srcv.IsNil() { - return dyn.NilValue, nil + return dyn.NilValue.WithLocation(ref.Location()), nil } srcv = srcv.Elem() @@ -55,27 +55,35 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, } } + var v dyn.Value + var err error switch srcv.Kind() { case reflect.Struct: - return fromTypedStruct(srcv, ref, options...) + v, err = fromTypedStruct(srcv, ref, options...) case reflect.Map: - return fromTypedMap(srcv, ref) + v, err = fromTypedMap(srcv, ref) case reflect.Slice: - return fromTypedSlice(srcv, ref) + v, err = fromTypedSlice(srcv, ref) case reflect.String: - return fromTypedString(srcv, ref, options...) + v, err = fromTypedString(srcv, ref, options...) case reflect.Bool: - return fromTypedBool(srcv, ref, options...) + v, err = fromTypedBool(srcv, ref, options...) case reflect.Int, reflect.Int32, reflect.Int64: - return fromTypedInt(srcv, ref, options...) + v, err = fromTypedInt(srcv, ref, options...) case reflect.Float32, reflect.Float64: - return fromTypedFloat(srcv, ref, options...) + v, err = fromTypedFloat(srcv, ref, options...) case reflect.Invalid: // If the value is untyped and not set (e.g. any type with nil value), we return nil. - return dyn.NilValue, nil + v, err = dyn.NilValue, nil + default: + return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) } - return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) + // Ensure the location metadata is retained. + if err != nil { + return dyn.InvalidValue, err + } + return v.WithLocation(ref.Location()), err } func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -117,7 +125,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio // 2. The reference is a map (i.e. the struct was and still is empty). // 3. The "includeZeroValues" option is set (i.e. the struct is a non-nil pointer). if out.Len() > 0 || ref.Kind() == dyn.KindMap || slices.Contains(options, includeZeroValues) { - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } // Otherwise, return nil. 
@@ -164,7 +172,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out.Set(refk, nv) } - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { @@ -199,7 +207,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out[i] = nv } - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } func fromTypedString(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index e5447fe8..c2c17a57 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -49,7 +49,7 @@ func TestFromTypedStructPointerZeroFields(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) - // For an initialized pointer with a nil reference we expect a nil. + // For an initialized pointer with a nil reference we expect an empty map. src = &Tmp{} nv, err = FromTyped(src, dyn.NilValue) require.NoError(t, err) @@ -103,7 +103,7 @@ func TestFromTypedStructSetFields(t *testing.T) { }), nv) } -func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedStructSetFieldsRetainLocation(t *testing.T) { type Tmp struct { Foo string `json:"foo"` Bar string `json:"bar"` @@ -122,11 +122,9 @@ func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their location. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - - // Assert bar lost its location (because it was overwritten). - assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) } func TestFromTypedStringMapWithZeroValue(t *testing.T) { @@ -354,7 +352,7 @@ func TestFromTypedMapNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { var src = map[string]string{ "foo": "bar", "bar": "qux", @@ -368,11 +366,9 @@ func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their locations. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - - // Assert bar lost its location (because it was overwritten). - assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) } func TestFromTypedMapFieldWithZeroValue(t *testing.T) { @@ -429,7 +425,7 @@ func TestFromTypedSliceNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { var src = []string{ "foo", "bar", @@ -437,17 +433,15 @@ func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { ref := dyn.V([]dyn.Value{ dyn.NewValue("foo", dyn.Location{File: "foo"}), - dyn.NewValue("baz", dyn.Location{File: "baz"}), + dyn.NewValue("bar", dyn.Location{File: "bar"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their locations. 
assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv.Index(0)) - - // Assert bar lost its location (because it was overwritten). - assert.Equal(t, dyn.NewValue("bar", dyn.Location{}), nv.Index(1)) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "bar"}), nv.Index(1)) } func TestFromTypedStringEmpty(t *testing.T) { @@ -482,12 +476,20 @@ func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V("new"), nv) } -func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { - var src string = "foo" +func TestFromTypedStringRetainsLocations(t *testing.T) { var ref = dyn.NewValue("foo", dyn.Location{File: "foo"}) + + // case: value has not been changed + var src string = "foo" nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = "bar" + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv) } func TestFromTypedStringTypeError(t *testing.T) { @@ -529,12 +531,20 @@ func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(true), nv) } -func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { - var src bool = true +func TestFromTypedBoolRetainsLocations(t *testing.T) { var ref = dyn.NewValue(true, dyn.Location{File: "foo"}) + + // case: value has not been changed + var src bool = true nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(true, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = false + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "foo"}), nv) } func TestFromTypedBoolVariableReference(t *testing.T) { @@ -584,12 +594,20 @@ func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(int64(1234)), nv) } -func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) { - var src int = 1234 +func TestFromTypedIntRetainsLocations(t *testing.T) { var ref = dyn.NewValue(1234, dyn.Location{File: "foo"}) + + // case: value has not been changed + var src int = 1234 nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1234, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = 1235 + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(int64(1235), dyn.Location{File: "foo"}), nv) } func TestFromTypedIntVariableReference(t *testing.T) { @@ -639,12 +657,21 @@ func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(1.23), nv) } -func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) { - var src float64 = 1.23 +func TestFromTypedFloatRetainsLocations(t *testing.T) { + var src float64 var ref = dyn.NewValue(1.23, dyn.Location{File: "foo"}) + + // case: value has not been changed + src = 1.23 nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1.23, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = 1.24 + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(1.24, dyn.Location{File: "foo"}), nv) } func TestFromTypedFloatVariableReference(t *testing.T) { @@ -669,3 +696,35 @@ func TestFromTypedAnyNil(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) } + +func TestFromTypedNilPointerRetainsLocations(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string 
`json:"bar"` + } + + var src *Tmp + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} + +func TestFromTypedNilMapRetainsLocation(t *testing.T) { + var src map[string]string + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} + +func TestFromTypedNilSliceRetainsLocation(t *testing.T) { + var src []string + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} From 482d83cba82bf87b6f9d9d52a04631c792183210 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 26 Jun 2024 11:26:40 +0200 Subject: [PATCH 4/9] Revert "Retain location metadata for values in `convert.FromTyped`" (#1528) ## Changes This reverts commit dac5f09556875003986832f74829bdbc326e725f (#1523). Retaining the location for nil values means equality checks no longer pass. We need #1520 to be merged first. ## Tests Integration test `TestAccPythonWheelTaskDeployAndRunWithWrapper`. --- libs/dyn/convert/from_typed.go | 34 ++++----- libs/dyn/convert/from_typed_test.go | 109 +++++++--------------------- 2 files changed, 38 insertions(+), 105 deletions(-) diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index 258ade4e..af49a07a 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -42,7 +42,7 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, // Dereference pointer if necessary for srcv.Kind() == reflect.Pointer { if srcv.IsNil() { - return dyn.NilValue.WithLocation(ref.Location()), nil + return dyn.NilValue, nil } srcv = srcv.Elem() @@ -55,35 +55,27 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, } } - var v dyn.Value - var err error switch srcv.Kind() { case reflect.Struct: - v, err = fromTypedStruct(srcv, ref, options...) + return fromTypedStruct(srcv, ref, options...) case reflect.Map: - v, err = fromTypedMap(srcv, ref) + return fromTypedMap(srcv, ref) case reflect.Slice: - v, err = fromTypedSlice(srcv, ref) + return fromTypedSlice(srcv, ref) case reflect.String: - v, err = fromTypedString(srcv, ref, options...) + return fromTypedString(srcv, ref, options...) case reflect.Bool: - v, err = fromTypedBool(srcv, ref, options...) + return fromTypedBool(srcv, ref, options...) case reflect.Int, reflect.Int32, reflect.Int64: - v, err = fromTypedInt(srcv, ref, options...) + return fromTypedInt(srcv, ref, options...) case reflect.Float32, reflect.Float64: - v, err = fromTypedFloat(srcv, ref, options...) + return fromTypedFloat(srcv, ref, options...) case reflect.Invalid: // If the value is untyped and not set (e.g. any type with nil value), we return nil. - v, err = dyn.NilValue, nil - default: - return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) + return dyn.NilValue, nil } - // Ensure the location metadata is retained. 
- if err != nil { - return dyn.InvalidValue, err - } - return v.WithLocation(ref.Location()), err + return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) } func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -125,7 +117,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio // 2. The reference is a map (i.e. the struct was and still is empty). // 3. The "includeZeroValues" option is set (i.e. the struct is a non-nil pointer). if out.Len() > 0 || ref.Kind() == dyn.KindMap || slices.Contains(options, includeZeroValues) { - return dyn.V(out), nil + return dyn.NewValue(out, ref.Location()), nil } // Otherwise, return nil. @@ -172,7 +164,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out.Set(refk, nv) } - return dyn.V(out), nil + return dyn.NewValue(out, ref.Location()), nil } func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { @@ -207,7 +199,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out[i] = nv } - return dyn.V(out), nil + return dyn.NewValue(out, ref.Location()), nil } func fromTypedString(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index c2c17a57..e5447fe8 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -49,7 +49,7 @@ func TestFromTypedStructPointerZeroFields(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) - // For an initialized pointer with a nil reference we expect an empty map. + // For an initialized pointer with a nil reference we expect a nil. src = &Tmp{} nv, err = FromTyped(src, dyn.NilValue) require.NoError(t, err) @@ -103,7 +103,7 @@ func TestFromTypedStructSetFields(t *testing.T) { }), nv) } -func TestFromTypedStructSetFieldsRetainLocation(t *testing.T) { +func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { type Tmp struct { Foo string `json:"foo"` Bar string `json:"bar"` @@ -122,9 +122,11 @@ func TestFromTypedStructSetFieldsRetainLocation(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo and bar have retained their location. + // Assert foo has retained its location. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) + + // Assert bar lost its location (because it was overwritten). + assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) } func TestFromTypedStringMapWithZeroValue(t *testing.T) { @@ -352,7 +354,7 @@ func TestFromTypedMapNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { +func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { var src = map[string]string{ "foo": "bar", "bar": "qux", @@ -366,9 +368,11 @@ func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo and bar have retained their locations. + // Assert foo has retained its location. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) + + // Assert bar lost its location (because it was overwritten). 
+ assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) } func TestFromTypedMapFieldWithZeroValue(t *testing.T) { @@ -425,7 +429,7 @@ func TestFromTypedSliceNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { +func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { var src = []string{ "foo", "bar", @@ -433,15 +437,17 @@ func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { ref := dyn.V([]dyn.Value{ dyn.NewValue("foo", dyn.Location{File: "foo"}), - dyn.NewValue("bar", dyn.Location{File: "bar"}), + dyn.NewValue("baz", dyn.Location{File: "baz"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo and bar have retained their locations. + // Assert foo has retained its location. assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv.Index(0)) - assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "bar"}), nv.Index(1)) + + // Assert bar lost its location (because it was overwritten). + assert.Equal(t, dyn.NewValue("bar", dyn.Location{}), nv.Index(1)) } func TestFromTypedStringEmpty(t *testing.T) { @@ -476,20 +482,12 @@ func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V("new"), nv) } -func TestFromTypedStringRetainsLocations(t *testing.T) { - var ref = dyn.NewValue("foo", dyn.Location{File: "foo"}) - - // case: value has not been changed +func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { var src string = "foo" + var ref = dyn.NewValue("foo", dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv) - - // case: value has been changed - src = "bar" - nv, err = FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv) } func TestFromTypedStringTypeError(t *testing.T) { @@ -531,20 +529,12 @@ func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(true), nv) } -func TestFromTypedBoolRetainsLocations(t *testing.T) { - var ref = dyn.NewValue(true, dyn.Location{File: "foo"}) - - // case: value has not been changed +func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { var src bool = true + var ref = dyn.NewValue(true, dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(true, dyn.Location{File: "foo"}), nv) - - // case: value has been changed - src = false - nv, err = FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "foo"}), nv) } func TestFromTypedBoolVariableReference(t *testing.T) { @@ -594,20 +584,12 @@ func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(int64(1234)), nv) } -func TestFromTypedIntRetainsLocations(t *testing.T) { - var ref = dyn.NewValue(1234, dyn.Location{File: "foo"}) - - // case: value has not been changed +func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) { var src int = 1234 + var ref = dyn.NewValue(1234, dyn.Location{File: "foo"}) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1234, dyn.Location{File: "foo"}), nv) - - // case: value has been changed - src = 1235 - nv, err = FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(int64(1235), dyn.Location{File: "foo"}), nv) } func TestFromTypedIntVariableReference(t *testing.T) { @@ -657,21 +639,12 @@ func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { assert.Equal(t, 
dyn.V(1.23), nv) } -func TestFromTypedFloatRetainsLocations(t *testing.T) { - var src float64 +func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) { + var src float64 = 1.23 var ref = dyn.NewValue(1.23, dyn.Location{File: "foo"}) - - // case: value has not been changed - src = 1.23 nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1.23, dyn.Location{File: "foo"}), nv) - - // case: value has been changed - src = 1.24 - nv, err = FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(1.24, dyn.Location{File: "foo"}), nv) } func TestFromTypedFloatVariableReference(t *testing.T) { @@ -696,35 +669,3 @@ func TestFromTypedAnyNil(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) } - -func TestFromTypedNilPointerRetainsLocations(t *testing.T) { - type Tmp struct { - Foo string `json:"foo"` - Bar string `json:"bar"` - } - - var src *Tmp - ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) - - nv, err := FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) -} - -func TestFromTypedNilMapRetainsLocation(t *testing.T) { - var src map[string]string - ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) - - nv, err := FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) -} - -func TestFromTypedNilSliceRetainsLocation(t *testing.T) { - var src []string - ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) - - nv, err := FromTyped(src, ref) - require.NoError(t, err) - assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) -} From ce5a3f2ce6d2dd2cc39ed254a557f6bf68c8e9b7 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 26 Jun 2024 11:29:46 +0200 Subject: [PATCH 5/9] Upgrade TF provider to 1.48.0 (#1527) ## Changes This includes a fix for library order not being respected. ## Tests Manually confirmed the fix works in https://github.com/databricks/bundle-examples/pull/29. 
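Among the schema additions in this upgrade is a periodic job trigger (`ResourceJobTriggerPeriodic`, see the diff below). A minimal in-package sketch of how the new struct composes; the `Interval` and `Unit` values are illustrative, since the set of valid units is not part of this diff:

```go
package schema

// Sketch: a job trigger using the new periodic block. The PauseStatus
// and unit strings are illustrative values, not taken from this change.
func exampleJobTrigger() ResourceJobTrigger {
	return ResourceJobTrigger{
		PauseStatus: "UNPAUSED",
		Periodic: &ResourceJobTriggerPeriodic{
			Interval: 1,
			Unit:     "HOURS", // assumed unit value
		},
	}
}
```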
--- bundle/internal/tf/codegen/schema/version.go | 2 +- bundle/internal/tf/schema/config.go | 1 + .../schema/data_source_external_location.go | 1 + bundle/internal/tf/schema/data_source_job.go | 18 ++++++++++++++ .../schema/data_source_storage_credential.go | 1 + bundle/internal/tf/schema/resource_job.go | 24 +++++++++++++++++++ .../tf/schema/resource_online_table.go | 9 +++---- bundle/internal/tf/schema/root.go | 2 +- 8 files changed, 52 insertions(+), 6 deletions(-) diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go index 9595433a..a99f15a4 100644 --- a/bundle/internal/tf/codegen/schema/version.go +++ b/bundle/internal/tf/codegen/schema/version.go @@ -1,3 +1,3 @@ package schema -const ProviderVersion = "1.47.0" +const ProviderVersion = "1.48.0" diff --git a/bundle/internal/tf/schema/config.go b/bundle/internal/tf/schema/config.go index d24d5733..a2de987e 100644 --- a/bundle/internal/tf/schema/config.go +++ b/bundle/internal/tf/schema/config.go @@ -28,6 +28,7 @@ type Config struct { Profile string `json:"profile,omitempty"` RateLimit int `json:"rate_limit,omitempty"` RetryTimeoutSeconds int `json:"retry_timeout_seconds,omitempty"` + ServerlessComputeId string `json:"serverless_compute_id,omitempty"` SkipVerify bool `json:"skip_verify,omitempty"` Token string `json:"token,omitempty"` Username string `json:"username,omitempty"` diff --git a/bundle/internal/tf/schema/data_source_external_location.go b/bundle/internal/tf/schema/data_source_external_location.go index 0fea6e52..a3e78cbd 100644 --- a/bundle/internal/tf/schema/data_source_external_location.go +++ b/bundle/internal/tf/schema/data_source_external_location.go @@ -19,6 +19,7 @@ type DataSourceExternalLocationExternalLocationInfo struct { CreatedBy string `json:"created_by,omitempty"` CredentialId string `json:"credential_id,omitempty"` CredentialName string `json:"credential_name,omitempty"` + IsolationMode string `json:"isolation_mode,omitempty"` MetastoreId string `json:"metastore_id,omitempty"` Name string `json:"name,omitempty"` Owner string `json:"owner,omitempty"` diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go index d517bbe0..727848ce 100644 --- a/bundle/internal/tf/schema/data_source_job.go +++ b/bundle/internal/tf/schema/data_source_job.go @@ -26,6 +26,7 @@ type DataSourceJobJobSettingsSettingsEmailNotifications struct { OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -500,6 +501,7 @@ type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct { OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -529,6 +531,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskEmailNotifications struc OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string 
`json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -824,6 +827,10 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSt Id string `json:"id"` } +type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -832,6 +839,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotifications str OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } @@ -1163,6 +1171,10 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart struct { Id string `json:"id"` } +type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -1171,6 +1183,7 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotifications struct { OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } @@ -1236,6 +1249,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart struct { Id string `json:"id"` } +type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -1244,6 +1261,7 @@ type DataSourceJobJobSettingsSettingsWebhookNotifications struct { OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } diff --git 
a/bundle/internal/tf/schema/data_source_storage_credential.go b/bundle/internal/tf/schema/data_source_storage_credential.go index c7045d44..bf58f272 100644 --- a/bundle/internal/tf/schema/data_source_storage_credential.go +++ b/bundle/internal/tf/schema/data_source_storage_credential.go @@ -36,6 +36,7 @@ type DataSourceStorageCredentialStorageCredentialInfo struct { CreatedAt int `json:"created_at,omitempty"` CreatedBy string `json:"created_by,omitempty"` Id string `json:"id,omitempty"` + IsolationMode string `json:"isolation_mode,omitempty"` MetastoreId string `json:"metastore_id,omitempty"` Name string `json:"name,omitempty"` Owner string `json:"owner,omitempty"` diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index 0950073e..42b648b0 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -26,6 +26,7 @@ type ResourceJobEmailNotifications struct { OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -573,6 +574,7 @@ type ResourceJobTaskEmailNotifications struct { OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -602,6 +604,7 @@ type ResourceJobTaskForEachTaskTaskEmailNotifications struct { OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []string `json:"on_failure,omitempty"` OnStart []string `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []string `json:"on_success,omitempty"` } @@ -943,6 +946,10 @@ type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart struct { Id string `json:"id"` } +type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -951,6 +958,7 @@ type ResourceJobTaskForEachTaskTaskWebhookNotifications struct { OnDurationWarningThresholdExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } @@ -1329,6 +1337,10 @@ type ResourceJobTaskWebhookNotificationsOnStart struct { Id string `json:"id"` } +type ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type ResourceJobTaskWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -1337,6 +1349,7 @@ type ResourceJobTaskWebhookNotifications struct { 
OnDurationWarningThresholdExceeded []ResourceJobTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []ResourceJobTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []ResourceJobTaskWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []ResourceJobTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } @@ -1378,6 +1391,11 @@ type ResourceJobTriggerFileArrival struct { WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"` } +type ResourceJobTriggerPeriodic struct { + Interval int `json:"interval"` + Unit string `json:"unit"` +} + type ResourceJobTriggerTable struct { Condition string `json:"condition,omitempty"` MinTimeBetweenTriggersSeconds int `json:"min_time_between_triggers_seconds,omitempty"` @@ -1395,6 +1413,7 @@ type ResourceJobTriggerTableUpdate struct { type ResourceJobTrigger struct { PauseStatus string `json:"pause_status,omitempty"` FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"` + Periodic *ResourceJobTriggerPeriodic `json:"periodic,omitempty"` Table *ResourceJobTriggerTable `json:"table,omitempty"` TableUpdate *ResourceJobTriggerTableUpdate `json:"table_update,omitempty"` } @@ -1411,6 +1430,10 @@ type ResourceJobWebhookNotificationsOnStart struct { Id string `json:"id"` } +type ResourceJobWebhookNotificationsOnStreamingBacklogExceeded struct { + Id string `json:"id"` +} + type ResourceJobWebhookNotificationsOnSuccess struct { Id string `json:"id"` } @@ -1419,6 +1442,7 @@ type ResourceJobWebhookNotifications struct { OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"` OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnStreamingBacklogExceeded []ResourceJobWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"` OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } diff --git a/bundle/internal/tf/schema/resource_online_table.go b/bundle/internal/tf/schema/resource_online_table.go index af8a348d..de671ead 100644 --- a/bundle/internal/tf/schema/resource_online_table.go +++ b/bundle/internal/tf/schema/resource_online_table.go @@ -19,8 +19,9 @@ type ResourceOnlineTableSpec struct { } type ResourceOnlineTable struct { - Id string `json:"id,omitempty"` - Name string `json:"name"` - Status []any `json:"status,omitempty"` - Spec *ResourceOnlineTableSpec `json:"spec,omitempty"` + Id string `json:"id,omitempty"` + Name string `json:"name"` + Status []any `json:"status,omitempty"` + TableServingUrl string `json:"table_serving_url,omitempty"` + Spec *ResourceOnlineTableSpec `json:"spec,omitempty"` } diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 53f89203..39db3ea2 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -21,7 +21,7 @@ type Root struct { const ProviderHost = "registry.terraform.io" const ProviderSource = "databricks/databricks" -const ProviderVersion = "1.47.0" +const ProviderVersion = "1.48.0" func NewRoot() *Root { return &Root{ From 5f4279160990b2fd4a4d522292e1ff832f307892 Mon Sep 17 00:00:00 2001 From: 
Andrew Nester Date: Wed, 26 Jun 2024 12:25:32 +0200 Subject: [PATCH 6/9] Added support for complex variables (#1467) ## Changes Added support for complex variables Now it's possible to add and use complex variables as shown below ``` bundle: name: complex-variables resources: jobs: my_job: job_clusters: - job_cluster_key: key new_cluster: ${var.cluster} tasks: - task_key: test job_cluster_key: key variables: cluster: description: "A cluster definition" type: complex default: spark_version: "13.2.x-scala2.11" node_type_id: "Standard_DS3_v2" num_workers: 2 spark_conf: spark.speculation: true spark.databricks.delta.retentionDurationCheck.enabled: false ``` Fixes #1298 - [x] Support for complex variables - [x] Allow variable overrides (with shortcut) in targets - [x] Don't allow to provide complex variables via flag or env variable - [x] Fail validation if complex value is used but not `type: complex` provided - [x] Support using variables inside complex variables ## Tests Added unit tests --------- Co-authored-by: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> --- .../resolve_resource_references_test.go | 32 ++- .../mutator/resolve_variable_references.go | 55 +++++- .../resolve_variable_references_test.go | 185 +++++++++++++++++- bundle/config/mutator/set_variables.go | 8 +- bundle/config/mutator/set_variables_test.go | 47 +++-- bundle/config/root.go | 22 ++- bundle/config/root_test.go | 21 +- bundle/config/variable/variable.go | 39 +++- bundle/phases/initialize.go | 6 +- bundle/schema/schema_test.go | 96 ++++----- bundle/tests/complex_variables_test.go | 62 ++++++ bundle/tests/variables/complex/databricks.yml | 49 +++++ bundle/tests/variables_test.go | 10 +- libs/dyn/convert/from_typed.go | 22 ++- libs/dyn/convert/from_typed_test.go | 36 ++++ libs/dyn/convert/normalize.go | 27 +++ libs/dyn/convert/normalize_test.go | 140 +++++++++++++ libs/dyn/convert/to_typed.go | 25 +++ libs/dyn/convert/to_typed_test.go | 22 +++ libs/dyn/dynvar/ref.go | 2 +- libs/dyn/dynvar/resolve_test.go | 60 ++++++ 21 files changed, 853 insertions(+), 113 deletions(-) create mode 100644 bundle/tests/complex_variables_test.go create mode 100644 bundle/tests/variables/complex/databricks.yml diff --git a/bundle/config/mutator/resolve_resource_references_test.go b/bundle/config/mutator/resolve_resource_references_test.go index 214b712e..86a03b23 100644 --- a/bundle/config/mutator/resolve_resource_references_test.go +++ b/bundle/config/mutator/resolve_resource_references_test.go @@ -35,7 +35,7 @@ func TestResolveClusterReference(t *testing.T) { }, }, "some-variable": { - Value: &justString, + Value: justString, }, }, }, @@ -53,8 +53,8 @@ func TestResolveClusterReference(t *testing.T) { diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value) - require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["my-cluster-id-1"].Value) + require.Equal(t, "9876-5432-xywz", b.Config.Variables["my-cluster-id-2"].Value) } func TestResolveNonExistentClusterReference(t *testing.T) { @@ -69,7 +69,7 @@ func TestResolveNonExistentClusterReference(t *testing.T) { }, }, "some-variable": { - Value: &justString, + Value: justString, }, }, }, @@ -105,7 +105,7 @@ func TestNoLookupIfVariableIsSet(t *testing.T) { diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) 
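(Editorial aside, not part of the patch: the test churn in this hunk and the ones below follows from `Variable.Value`/`Variable.Default` changing from `*string` to `any`. A minimal, hypothetical sketch of the before/after assignment style — the `Variable` stand-in here only mirrors the shape of the patched `variable` package:)

```
package main

import "fmt"

// Hypothetical stand-in mirroring the patched variable package:
// Value and Default are plain `any` values instead of *string.
type Variable struct {
	Default any
	Value   any
}

func main() {
	// Before this patch, a pointer-taking helper was needed:
	//   s := func(s string) *string { return &s }
	//   v := Variable{Value: s("bar")}
	// After it, strings and composite values are assigned directly.
	v := Variable{Value: "bar"}
	w := Variable{Value: map[string]any{"node_type_id": "Standard_DS3_v2", "num_workers": 2}}
	fmt.Println(v.Value, w.Value)
}
```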
- require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value) + require.Equal(t, "random value", b.Config.Variables["my-cluster-id"].Value) } func TestResolveServicePrincipal(t *testing.T) { @@ -132,14 +132,11 @@ func TestResolveServicePrincipal(t *testing.T) { diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) - require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value) + require.Equal(t, "app-1234", b.Config.Variables["my-sp"].Value) } func TestResolveVariableReferencesInVariableLookups(t *testing.T) { - s := func(s string) *string { - return &s - } - + s := "bar" b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -147,7 +144,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) { }, Variables: map[string]*variable.Variable{ "foo": { - Value: s("bar"), + Value: s, }, "lookup": { Lookup: &variable.Lookup{ @@ -168,7 +165,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) { diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) require.NoError(t, diags.Error()) require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) } func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) { @@ -197,22 +194,15 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) { } func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) { - s := func(s string) *string { - return &s - } - b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Target: "dev", }, Variables: map[string]*variable.Variable{ - "foo": { - Value: s("bar"), - }, "lookup": { Lookup: &variable.Lookup{ - Cluster: "cluster-${var.foo}-${bundle.target}", + Cluster: "cluster-${bundle.target}", }, }, }, @@ -227,5 +217,5 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) { diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())) require.NoError(t, diags.Error()) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) } diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go index f7fce6c8..cddc85cb 100644 --- a/bundle/config/mutator/resolve_variable_references.go +++ b/bundle/config/mutator/resolve_variable_references.go @@ -17,6 +17,7 @@ type resolveVariableReferences struct { prefixes []string pattern dyn.Pattern lookupFn func(dyn.Value, dyn.Path) (dyn.Value, error) + skipFn func(dyn.Value) bool } func ResolveVariableReferences(prefixes ...string) bundle.Mutator { @@ -31,6 +32,18 @@ func ResolveVariableReferencesInLookup() bundle.Mutator { }, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables} } +func ResolveVariableReferencesInComplexVariables() bundle.Mutator { + return &resolveVariableReferences{prefixes: []string{ + "bundle", + "workspace", + "variables", + }, + pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")), + lookupFn: lookupForComplexVariables, + skipFn: skipResolvingInNonComplexVariables, + } +} + func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) { // Future opportunity: if we lookup 
this path in both the given root // and the synthesized root, we know if it was explicitly set or implied to be empty. @@ -38,6 +51,34 @@ func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) { return dyn.GetByPath(v, path) } +func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) { + if path[0].Key() != "variables" { + return lookup(v, path) + } + + varV, err := dyn.GetByPath(v, path[:len(path)-1]) + if err != nil { + return dyn.InvalidValue, err + } + + var vv variable.Variable + err = convert.ToTyped(&vv, varV) + if err != nil { + return dyn.InvalidValue, err + } + + if vv.Type == variable.VariableTypeComplex { + return dyn.InvalidValue, fmt.Errorf("complex variables cannot contain references to another complex variables") + } + + return lookup(v, path) +} + +func skipResolvingInNonComplexVariables(v dyn.Value) bool { + _, ok := v.AsMap() + return !ok +} + func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) { if path[0].Key() != "variables" { return lookup(v, path) @@ -100,17 +141,27 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) // Resolve variable references in all values. return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) { // Rewrite the shorthand path ${var.foo} into ${variables.foo.value}. - if path.HasPrefix(varPath) && len(path) == 2 { - path = dyn.NewPath( + if path.HasPrefix(varPath) { + newPath := dyn.NewPath( dyn.Key("variables"), path[1], dyn.Key("value"), ) + + if len(path) > 2 { + newPath = newPath.Append(path[2:]...) + } + + path = newPath } // Perform resolution only if the path starts with one of the specified prefixes. for _, prefix := range prefixes { if path.HasPrefix(prefix) { + // Skip resolution if there is a skip function and it returns true. 
+ if m.skipFn != nil && m.skipFn(v) { + return dyn.InvalidValue, dynvar.ErrSkipResolution + } return m.lookupFn(normalized, path) } } diff --git a/bundle/config/mutator/resolve_variable_references_test.go b/bundle/config/mutator/resolve_variable_references_test.go index 651ea3d2..2b88a249 100644 --- a/bundle/config/mutator/resolve_variable_references_test.go +++ b/bundle/config/mutator/resolve_variable_references_test.go @@ -43,10 +43,6 @@ func TestResolveVariableReferences(t *testing.T) { } func TestResolveVariableReferencesToBundleVariables(t *testing.T) { - s := func(s string) *string { - return &s - } - b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -57,7 +53,7 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) { }, Variables: map[string]*variable.Variable{ "foo": { - Value: s("bar"), + Value: "bar", }, }, }, @@ -195,3 +191,182 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { assert.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MaxWorkers) assert.Equal(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice) } + +func TestResolveComplexVariable(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. + diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables")) + require.NoError(t, diags.Error()) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NumWorkers) +} + +func TestResolveComplexVariableReferencesToFields(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. 
+ diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0].new_cluster") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("node_type_id")), dyn.V("${var.cluster.node_type_id}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables")) + require.NoError(t, diags.Error()) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId) +} + +func TestResolveComplexVariableReferencesWithComplexVariablesError(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + "spark_conf": "${var.spark_conf}", + }, + Type: variable.VariableTypeComplex, + }, + "spark_conf": { + Value: map[string]any{ + "spark.executor.memory": "4g", + "spark.executor.cores": "2", + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. + diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, bundle.Seq(ResolveVariableReferencesInComplexVariables(), ResolveVariableReferences("bundle", "workspace", "variables"))) + require.ErrorContains(t, diags.Error(), "complex variables cannot contain references to another complex variables") +} diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 0cee24ab..b3a9cf40 100644 --- a/bundle/config/mutator/set_variables.go +++ b/bundle/config/mutator/set_variables.go @@ -30,6 +30,10 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di // case: read and set variable value from process environment envVarName := bundleVarPrefix + name if val, ok := env.Lookup(ctx, envVarName); ok { + if v.IsComplex() { + return diag.Errorf(`setting via environment variables (%s) is not supported for complex variable %s`, envVarName, name) + } + err := v.Set(val) if err != nil { return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err) @@ -45,9 +49,9 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di // case: Set the variable to its default value if v.HasDefault() { - err := v.Set(*v.Default) + err := v.Set(v.Default) if err != nil { - return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err) + 
return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, v.Default, name, err) } return nil } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index ae4f7989..65dedee9 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -15,7 +15,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { defaultVal := "default" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, + Default: defaultVal, } // set value for variable as an environment variable @@ -23,19 +23,19 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "process-env") + assert.Equal(t, variable.Value, "process-env") } func TestSetVariableUsingDefaultValue(t *testing.T) { defaultVal := "default" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, + Default: defaultVal, } diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "default") + assert.Equal(t, variable.Value, "default") } func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { @@ -43,15 +43,15 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { val := "assigned-value" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, - Value: &val, + Default: defaultVal, + Value: val, } // since a value is already assigned to the variable, it would not be overridden // by the default value diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "assigned-value") + assert.Equal(t, variable.Value, "assigned-value") } func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { @@ -59,8 +59,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { val := "assigned-value" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, - Value: &val, + Default: defaultVal, + Value: val, } // set value for variable as an environment variable @@ -70,7 +70,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { // by the value from environment diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "assigned-value") + assert.Equal(t, variable.Value, "assigned-value") } func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) { @@ -92,15 +92,15 @@ func TestSetVariablesMutator(t *testing.T) { Variables: map[string]*variable.Variable{ "a": { Description: "resolved to default value", - Default: &defaultValForA, + Default: defaultValForA, }, "b": { Description: "resolved from environment vairables", - Default: &defaultValForB, + Default: defaultValForB, }, "c": { Description: "has already been assigned a value", - Value: &valForC, + Value: valForC, }, }, }, @@ -110,7 +110,22 @@ func TestSetVariablesMutator(t *testing.T) { diags := bundle.Apply(context.Background(), b, SetVariables()) require.NoError(t, diags.Error()) - assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) - assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) - assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value) + assert.Equal(t, "default-a", b.Config.Variables["a"].Value) + assert.Equal(t, 
"env-var-b", b.Config.Variables["b"].Value) + assert.Equal(t, "assigned-val-c", b.Config.Variables["c"].Value) +} + +func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) { + defaultVal := "default" + variable := variable.Variable{ + Description: "a test variable", + Default: defaultVal, + Type: variable.VariableTypeComplex, + } + + // set value for variable as an environment variable + t.Setenv("BUNDLE_VAR_foo", "process-env") + + diags := setVariable(context.Background(), &variable, "foo") + assert.ErrorContains(t, diags.Error(), "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo") } diff --git a/bundle/config/root.go b/bundle/config/root.go index 2ce3a138..0def1167 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -267,6 +267,11 @@ func (r *Root) InitializeVariables(vars []string) error { if _, ok := r.Variables[name]; !ok { return fmt.Errorf("variable %s has not been defined", name) } + + if r.Variables[name].IsComplex() { + return fmt.Errorf("setting variables of complex type via --var flag is not supported: %s", name) + } + err := r.Variables[name].Set(val) if err != nil { return fmt.Errorf("failed to assign %s to %s: %s", val, name, err) @@ -419,7 +424,7 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) { } // For each variable, normalize its contents if it is a single string. - return dyn.Map(target, "variables", dyn.Foreach(func(_ dyn.Path, variable dyn.Value) (dyn.Value, error) { + return dyn.Map(target, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) { switch variable.Kind() { case dyn.KindString, dyn.KindBool, dyn.KindFloat, dyn.KindInt: @@ -430,6 +435,21 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) { "default": variable, }, variable.Location()), nil + case dyn.KindMap, dyn.KindSequence: + // Check if the original definition of variable has a type field. 
+ typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type"))) + if err != nil { + return variable, nil + } + + if typeV.MustString() == "complex" { + return dyn.NewValue(map[string]dyn.Value{ + "default": variable, + }, variable.Location()), nil + } + + return variable, nil + default: return variable, nil } diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index b5676884..27cc3d22 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -51,7 +51,7 @@ func TestInitializeVariables(t *testing.T) { root := &Root{ Variables: map[string]*variable.Variable{ "foo": { - Default: &fooDefault, + Default: fooDefault, Description: "an optional variable since default is defined", }, "bar": { @@ -62,8 +62,8 @@ func TestInitializeVariables(t *testing.T) { err := root.InitializeVariables([]string{"foo=123", "bar=456"}) assert.NoError(t, err) - assert.Equal(t, "123", *(root.Variables["foo"].Value)) - assert.Equal(t, "456", *(root.Variables["bar"].Value)) + assert.Equal(t, "123", (root.Variables["foo"].Value)) + assert.Equal(t, "456", (root.Variables["bar"].Value)) } func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) { @@ -77,7 +77,7 @@ func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) { err := root.InitializeVariables([]string{"foo=123=567"}) assert.NoError(t, err) - assert.Equal(t, "123=567", *(root.Variables["foo"].Value)) + assert.Equal(t, "123=567", (root.Variables["foo"].Value)) } func TestInitializeVariablesInvalidFormat(t *testing.T) { @@ -119,3 +119,16 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) { require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, Development, root.Bundle.Mode) } + +func TestInitializeComplexVariablesViaFlagIsNotAllowed(t *testing.T) { + root := &Root{ + Variables: map[string]*variable.Variable{ + "foo": { + Type: variable.VariableTypeComplex, + }, + }, + } + + err := root.InitializeVariables([]string{"foo=123"}) + assert.ErrorContains(t, err, "setting variables of complex type via --var flag is not supported: foo") +} diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go index 5e700a9b..ba94f9c8 100644 --- a/bundle/config/variable/variable.go +++ b/bundle/config/variable/variable.go @@ -2,12 +2,27 @@ package variable import ( "fmt" + "reflect" +) + +// We are using `any` because since introduction of complex variables, +// variables can be of any type. +// Type alias is used to make it easier to understand the code. +type VariableValue = any + +type VariableType string + +const ( + VariableTypeComplex VariableType = "complex" ) // An input variable for the bundle config type Variable struct { + // A type of the variable. This is used to validate the value of the variable + Type VariableType `json:"type,omitempty"` + // A default value which then makes the variable optional - Default *string `json:"default,omitempty"` + Default VariableValue `json:"default,omitempty"` // Documentation for this input variable Description string `json:"description,omitempty"` @@ -21,7 +36,7 @@ type Variable struct { // 4. Default value defined in variable definition // 5. Throw error, since if no default value is defined, then the variable // is required - Value *string `json:"value,omitempty" bundle:"readonly"` + Value VariableValue `json:"value,omitempty" bundle:"readonly"` // The value of this field will be used to lookup the resource by name // And assign the value of the variable to ID of the resource found. 
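(Editorial aside — a self-contained sketch, not part of the patch, of what the new `Type` field buys: the `Set` hunk below accepts map/slice/struct values only when the variable is declared with `type: complex`. The trimmed-down types here mirror, but are not, the real `variable` package:)

```
package main

import (
	"fmt"
	"reflect"
)

// Trimmed-down mirrors of the patched types, for illustration only.
type VariableValue = any
type VariableType string

const VariableTypeComplex VariableType = "complex"

type Variable struct {
	Type  VariableType
	Value VariableValue
}

// set mimics the validation added in Set below: composite values
// require the variable to be declared as complex.
func (v *Variable) set(val VariableValue) error {
	switch reflect.ValueOf(val).Kind() {
	case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map:
		if v.Type != VariableTypeComplex {
			return fmt.Errorf("variable type is not complex")
		}
	}
	v.Value = val
	return nil
}

func main() {
	var plain Variable
	fmt.Println(plain.set(map[string]any{"num_workers": 2})) // variable type is not complex

	complexVar := Variable{Type: VariableTypeComplex}
	fmt.Println(complexVar.set(map[string]any{"num_workers": 2})) // <nil>
}
```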
@@ -39,10 +54,24 @@ func (v *Variable) HasValue() bool { return v.Value != nil } -func (v *Variable) Set(val string) error { +func (v *Variable) Set(val VariableValue) error { if v.HasValue() { - return fmt.Errorf("variable has already been assigned value: %s", *v.Value) + return fmt.Errorf("variable has already been assigned value: %s", v.Value) } - v.Value = &val + + rv := reflect.ValueOf(val) + switch rv.Kind() { + case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map: + if v.Type != VariableTypeComplex { + return fmt.Errorf("variable type is not complex") + } + } + + v.Value = val + return nil } + +func (v *Variable) IsComplex() bool { + return v.Type == VariableTypeComplex +} diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index d96ee0eb..79fca9df 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -29,11 +29,13 @@ func Initialize() bundle.Mutator { mutator.ExpandWorkspaceRoot(), mutator.DefineDefaultWorkspacePaths(), mutator.SetVariables(), - // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences - // and ResolveVariableReferences. See what is expected in PythonMutatorPhaseInit doc + // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences, + // ResolveVariableReferencesInComplexVariables and ResolveVariableReferences. + // See what is expected in PythonMutatorPhaseInit doc pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit), mutator.ResolveVariableReferencesInLookup(), mutator.ResolveResourceReferences(), + mutator.ResolveVariableReferencesInComplexVariables(), mutator.ResolveVariableReferences( "bundle", "workspace", diff --git a/bundle/schema/schema_test.go b/bundle/schema/schema_test.go index ea4fd102..6d9df0cc 100644 --- a/bundle/schema/schema_test.go +++ b/bundle/schema/schema_test.go @@ -20,7 +20,7 @@ func TestIntSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }` @@ -47,7 +47,7 @@ func TestBooleanSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }` @@ -123,7 +123,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -134,7 +134,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -145,7 +145,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -156,7 +156,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -167,7 +167,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -178,7 +178,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -189,7 +189,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -200,7 +200,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -214,7 +214,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -225,7 +225,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -236,7 +236,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -247,7 +247,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -258,7 +258,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -326,7 +326,7 @@ func TestStructOfStructsSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -391,7 +391,7 @@ func TestStructOfMapsSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -481,7 +481,7 @@ func TestMapOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -518,7 +518,7 @@ func TestMapOfStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -556,7 +556,7 @@ func TestMapOfMapSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -661,7 +661,7 @@ func TestSliceOfMapSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -699,7 +699,7 @@ func TestSliceOfStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -757,7 +757,7 @@ func TestEmbeddedStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -797,7 +797,7 @@ func TestEmbeddedStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -892,7 +892,7 @@ func TestNonAnnotatedFieldsAreSkipped(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -934,7 +934,7 @@ func TestDashFieldsAreSkipped(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -987,7 +987,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1004,7 +1004,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1018,7 +1018,7 @@ func TestPointerInStructSchema(t 
*testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1035,7 +1035,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1106,7 +1106,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1129,7 +1129,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1157,7 +1157,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1180,7 +1180,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1210,7 +1210,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1236,7 +1236,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1322,7 +1322,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1333,7 +1333,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1347,7 +1347,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1429,7 +1429,7 @@ func TestDocIngestionForObject(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1512,7 +1512,7 @@ func TestDocIngestionForSlice(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1524,7 +1524,7 @@ func TestDocIngestionForSlice(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1611,7 +1611,7 @@ func TestDocIngestionForMap(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1623,7 +1623,7 @@ func TestDocIngestionForMap(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1683,7 +1683,7 @@ func TestDocIngestionForTopLevelPrimitive(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1761,7 +1761,7 @@ func TestInterfaceGeneratesEmptySchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1810,7 +1810,7 @@ func TestBundleReadOnlytag(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1870,7 +1870,7 @@ func TestBundleInternalTag(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, diff --git a/bundle/tests/complex_variables_test.go b/bundle/tests/complex_variables_test.go new file mode 100644 index 00000000..ffe80e41 --- /dev/null +++ b/bundle/tests/complex_variables_test.go @@ -0,0 +1,62 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/stretchr/testify/require" +) + +func TestComplexVariables(t *testing.T) { + b, diags := loadTargetWithDiags("variables/complex", "default") + require.Empty(t, diags) + + diags = bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + mutator.ResolveVariableReferencesInComplexVariables(), + mutator.ResolveVariableReferences( + "variables", + ), + )) + require.NoError(t, diags.Error()) + + require.Equal(t, "13.2.x-scala2.11", 
b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers) + require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"]) + + require.Equal(t, 3, len(b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries)) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Jar: "/path/to/jar", + }) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Egg: "/path/to/egg", + }) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Whl: "/path/to/whl", + }) + + require.Equal(t, "task with spark version 13.2.x-scala2.11 and jar /path/to/jar", b.Config.Resources.Jobs["my_job"].Tasks[0].TaskKey) +} + +func TestComplexVariablesOverride(t *testing.T) { + b, diags := loadTargetWithDiags("variables/complex", "dev") + require.Empty(t, diags) + + diags = bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + mutator.ResolveVariableReferencesInComplexVariables(), + mutator.ResolveVariableReferences( + "variables", + ), + )) + require.NoError(t, diags.Error()) + + require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion) + require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers) + require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"]) +} diff --git a/bundle/tests/variables/complex/databricks.yml b/bundle/tests/variables/complex/databricks.yml new file mode 100644 index 00000000..f7535ad4 --- /dev/null +++ b/bundle/tests/variables/complex/databricks.yml @@ -0,0 +1,49 @@ +bundle: + name: complex-variables + +resources: + jobs: + my_job: + job_clusters: + - job_cluster_key: key + new_cluster: ${var.cluster} + tasks: + - task_key: test + job_cluster_key: key + libraries: ${variables.libraries.value} + task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}" + +variables: + node_type: + default: "Standard_DS3_v2" + cluster: + type: complex + description: "A cluster definition" + default: + spark_version: "13.2.x-scala2.11" + node_type_id: ${var.node_type} + num_workers: 2 + spark_conf: + spark.speculation: true + spark.databricks.delta.retentionDurationCheck.enabled: false + libraries: + type: complex + description: "A libraries definition" + default: + - jar: "/path/to/jar" + - egg: "/path/to/egg" + - whl: "/path/to/whl" + + +targets: + default: + dev: + variables: + node_type: "Standard_DS3_v3" + cluster: + spark_version: "14.2.x-scala2.11" + node_type_id: ${var.node_type} + num_workers: 4 + spark_conf: + spark.speculation: false + spark.databricks.delta.retentionDurationCheck.enabled: false diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 09441483..7cf0f72f 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -109,8 +109,8 @@ func TestVariablesWithoutDefinition(t *testing.T) { require.NoError(t, diags.Error()) require.True(t, b.Config.Variables["a"].HasValue()) require.True(t, b.Config.Variables["b"].HasValue()) - assert.Equal(t, "foo", 
*b.Config.Variables["a"].Value) - assert.Equal(t, "bar", *b.Config.Variables["b"].Value) + assert.Equal(t, "foo", b.Config.Variables["a"].Value) + assert.Equal(t, "bar", b.Config.Variables["b"].Value) } func TestVariablesWithTargetLookupOverrides(t *testing.T) { @@ -140,9 +140,9 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) { )) require.NoError(t, diags.Error()) - assert.Equal(t, "4321", *b.Config.Variables["d"].Value) - assert.Equal(t, "1234", *b.Config.Variables["e"].Value) - assert.Equal(t, "9876", *b.Config.Variables["f"].Value) + assert.Equal(t, "4321", b.Config.Variables["d"].Value) + assert.Equal(t, "1234", b.Config.Variables["e"].Value) + assert.Equal(t, "9876", b.Config.Variables["f"].Value) } func TestVariableTargetOverrides(t *testing.T) { diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index af49a07a..15c5b797 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -81,6 +81,11 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindMap, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) @@ -100,8 +105,13 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio refv = dyn.NilValue } + var options []fromTypedOptions + if v.Kind() == reflect.Interface { + options = append(options, includeZeroValues) + } + // Convert the field taking into account the reference value (may be equal to config.NilValue). - nv, err := fromTyped(v.Interface(), refv) + nv, err := fromTyped(v.Interface(), refv, options...) if err != nil { return dyn.InvalidValue, err } @@ -127,6 +137,11 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindMap, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) @@ -170,6 +185,11 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). 
+ if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindSequence, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index e5447fe8..ed0c11ca 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -662,6 +662,42 @@ func TestFromTypedFloatTypeError(t *testing.T) { require.Error(t, err) } +func TestFromTypedAny(t *testing.T) { + type Tmp struct { + Foo any `json:"foo"` + Bar any `json:"bar"` + Foz any `json:"foz"` + Baz any `json:"baz"` + } + + src := Tmp{ + Foo: "foo", + Bar: false, + Foz: 0, + Baz: map[string]any{ + "foo": "foo", + "bar": 1234, + "qux": 0, + "nil": nil, + }, + } + + ref := dyn.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V(false), + "foz": dyn.V(int64(0)), + "baz": dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V(int64(1234)), + "qux": dyn.V(int64(0)), + "nil": dyn.V(nil), + }), + }), nv) +} + func TestFromTypedAnyNil(t *testing.T) { var src any = nil var ref = dyn.NilValue diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 35d4d821..ad82e20e 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -56,6 +56,8 @@ func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value, seen [] return n.normalizeInt(typ, src, path) case reflect.Float32, reflect.Float64: return n.normalizeFloat(typ, src, path) + case reflect.Interface: + return n.normalizeInterface(typ, src, path) } return dyn.InvalidValue, diag.Errorf("unsupported type: %s", typ.Kind()) @@ -166,8 +168,15 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a struct. return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path)) } @@ -197,8 +206,15 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a map. return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path)) } @@ -225,8 +241,15 @@ func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value, seen [ return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a slice. 
return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindSequence, src, path)) } @@ -371,3 +394,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d return dyn.NewValue(out, src.Location()), diags } + +func (n normalizeOptions) normalizeInterface(typ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) { + return src, nil +} diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 843b4ea5..299ffcab 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -223,6 +223,52 @@ func TestNormalizeStructIncludeMissingFieldsOnRecursiveType(t *testing.T) { }), vout) } +func TestNormalizeStructVariableReference(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeStructRandomStringError(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeStructIntError(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeMap(t *testing.T) { var typ map[string]string vin := dyn.V(map[string]dyn.Value{ @@ -312,6 +358,40 @@ func TestNormalizeMapNestedError(t *testing.T) { ) } +func TestNormalizeMapVariableReference(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeMapRandomStringError(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeMapIntError(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeSlice(t *testing.T) { var typ []string vin := dyn.V([]dyn.Value{ @@ -400,6 +480,40 @@ func TestNormalizeSliceNestedError(t *testing.T) { ) } +func TestNormalizeSliceVariableReference(t *testing.T) { + var typ []string + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeSliceRandomStringError(t *testing.T) { + var typ []string + vin := dyn.NewValue("var 
foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected sequence, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeSliceIntError(t *testing.T) { + var typ []string + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected sequence, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeString(t *testing.T) { var typ string vin := dyn.V("string") @@ -725,3 +839,29 @@ func TestNormalizeAnchors(t *testing.T) { "foo": "bar", }, vout.AsAny()) } + +func TestNormalizeBoolToAny(t *testing.T) { + var typ any + vin := dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} + +func TestNormalizeIntToAny(t *testing.T) { + var typ any + vin := dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} + +func TestNormalizeSliceToAny(t *testing.T) { + var typ any + v1 := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + v2 := dyn.NewValue(2, dyn.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index f10853a2..91d6445a 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -46,6 +46,8 @@ func ToTyped(dst any, src dyn.Value) error { return toTypedInt(dstv, src) case reflect.Float32, reflect.Float64: return toTypedFloat(dstv, src) + case reflect.Interface: + return toTypedInterface(dstv, src) } return fmt.Errorf("unsupported type: %s", dstv.Kind()) @@ -101,6 +103,12 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -132,6 +140,12 @@ func toTypedMap(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -157,6 +171,12 @@ func toTypedSlice(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). 
+ if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -260,3 +280,8 @@ func toTypedFloat(dst reflect.Value, src dyn.Value) error { msg: fmt.Sprintf("expected a float, found a %s", src.Kind()), } } + +func toTypedInterface(dst reflect.Value, src dyn.Value) error { + dst.Set(reflect.ValueOf(src.AsAny())) + return nil +} diff --git a/libs/dyn/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go index 56d98a3c..5e37f286 100644 --- a/libs/dyn/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -511,3 +511,25 @@ func TestToTypedWithAliasKeyType(t *testing.T) { assert.Equal(t, "bar", out["foo"]) assert.Equal(t, "baz", out["bar"]) } + +func TestToTypedAnyWithBool(t *testing.T) { + var out any + err := ToTyped(&out, dyn.V(false)) + require.NoError(t, err) + assert.Equal(t, false, out) + + err = ToTyped(&out, dyn.V(true)) + require.NoError(t, err) + assert.Equal(t, true, out) +} + +func TestToTypedAnyWithMap(t *testing.T) { + var out any + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), + }) + err := ToTyped(&out, v) + require.NoError(t, err) + assert.Equal(t, map[string]any{"foo": "bar", "bar": "baz"}, out) +} diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go index e6340269..bf160fa8 100644 --- a/libs/dyn/dynvar/ref.go +++ b/libs/dyn/dynvar/ref.go @@ -6,7 +6,7 @@ import ( "github.com/databricks/cli/libs/dyn" ) -const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}` +const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}` var re = regexp.MustCompile(VariableRegex) diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index bbecbb77..498322a4 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -247,3 +247,63 @@ func TestResolveWithInterpolateAliasedRef(t *testing.T) { assert.Equal(t, "a", getByPath(t, out, "b").MustString()) assert.Equal(t, "a", getByPath(t, out, "c").MustString()) } + +func TestResolveIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{dyn.V("a"), dyn.V("b")}), + "a": dyn.V("a: ${slice[0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveIndexedRefsFromMap(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "map": dyn.V( + map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{dyn.V("a")}), + }), + "a": dyn.V("a: ${map.slice[0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveMapFieldFromIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "map": dyn.V( + map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "value": dyn.V("a"), + }), + }), + }), + "a": dyn.V("a: ${map.slice[0].value}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveNestedIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{ + dyn.V([]dyn.Value{dyn.V("a")}), + }), + "a": dyn.V("a: ${slice[0][0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", 
getByPath(t, out, "a").MustString())
+}

From cdd6fe8cb96d76035337bfda0bb798b090095dfb Mon Sep 17 00:00:00 2001
From: Pieter Noordhuis
Date: Wed, 26 Jun 2024 14:24:31 +0200
Subject: [PATCH 7/9] Release v0.222.0 (#1529)

CLI:
 * Add link to documentation for Homebrew installation to README ([#1505](https://github.com/databricks/cli/pull/1505)).
 * Fix `databricks configure` to use the `DATABRICKS_CONFIG_FILE` environment variable as the config file if it is set ([#1325](https://github.com/databricks/cli/pull/1325)).

Bundles:

The Terraform provider upgrade to v1.48.0 includes a fix for library order not being respected.

 * Fix conditional in query in `default-sql` template ([#1479](https://github.com/databricks/cli/pull/1479)).
 * Remove user credentials specified in the Git origin URL ([#1494](https://github.com/databricks/cli/pull/1494)).
 * Serialize dynamic value for `bundle validate` output ([#1499](https://github.com/databricks/cli/pull/1499)).
 * Override variables with lookup value even if the variable has a default value set ([#1504](https://github.com/databricks/cli/pull/1504)).
 * Pause quality monitors when "mode: development" is used ([#1481](https://github.com/databricks/cli/pull/1481)).
 * Return `fs.ModeDir` for Git folders in the workspace ([#1521](https://github.com/databricks/cli/pull/1521)).
 * Upgrade TF provider to 1.48.0 ([#1527](https://github.com/databricks/cli/pull/1527)).
 * Added support for complex variables ([#1467](https://github.com/databricks/cli/pull/1467)).

Internal:
 * Add randIntn function ([#1475](https://github.com/databricks/cli/pull/1475)).
 * Avoid multiple file tree traversals on bundle deploy ([#1493](https://github.com/databricks/cli/pull/1493)).
 * Clean up unused code ([#1502](https://github.com/databricks/cli/pull/1502)).
 * Use `dyn.InvalidValue` to indicate absence ([#1507](https://github.com/databricks/cli/pull/1507)).
 * Add ApplyPythonMutator ([#1430](https://github.com/databricks/cli/pull/1430)).
 * Set bool pointer to disable lock ([#1516](https://github.com/databricks/cli/pull/1516)).
 * Allow the any type to be set to nil in `convert.FromTyped` ([#1518](https://github.com/databricks/cli/pull/1518)).
 * Properly deal with nil values in `convert.FromTyped` ([#1511](https://github.com/databricks/cli/pull/1511)).
 * Return `dyn.InvalidValue` instead of `dyn.NilValue` when errors happen ([#1514](https://github.com/databricks/cli/pull/1514)).
 * PythonMutator: replace stdin/stdout with files ([#1512](https://github.com/databricks/cli/pull/1512)).
 * Add context type and value to path rewriting ([#1525](https://github.com/databricks/cli/pull/1525)).

API Changes:
 * Added schedule CRUD commands to `databricks lakeview`.
 * Added subscription CRUD commands to `databricks lakeview`.
 * Added `databricks apps start` command.

OpenAPI commit 7437dabb9dadee402c1fc060df4c1ce8cc5369f0 (2024-06-24)

Dependency updates:
 * Bump golang.org/x/text from 0.15.0 to 0.16.0 ([#1482](https://github.com/databricks/cli/pull/1482)).
 * Bump golang.org/x/term from 0.20.0 to 0.21.0 ([#1483](https://github.com/databricks/cli/pull/1483)).
 * Bump golang.org/x/mod from 0.17.0 to 0.18.0 ([#1484](https://github.com/databricks/cli/pull/1484)).
 * Bump golang.org/x/oauth2 from 0.20.0 to 0.21.0 ([#1485](https://github.com/databricks/cli/pull/1485)).
 * Bump github.com/briandowns/spinner from 1.23.0 to 1.23.1 ([#1495](https://github.com/databricks/cli/pull/1495)).
 * Bump github.com/spf13/cobra from 1.8.0 to 1.8.1 ([#1496](https://github.com/databricks/cli/pull/1496)).
* Bump github.com/databricks/databricks-sdk-go from 0.42.0 to 0.43.0 ([#1522](https://github.com/databricks/cli/pull/1522)).
---
 CHANGELOG.md | 48 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 48 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8f6f47dc..c5fcc45b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,53 @@
 # Version changelog
 
+## 0.222.0
+
+CLI:
+ * Add link to documentation for Homebrew installation to README ([#1505](https://github.com/databricks/cli/pull/1505)).
+ * Fix `databricks configure` to use the `DATABRICKS_CONFIG_FILE` environment variable as the config file if it is set ([#1325](https://github.com/databricks/cli/pull/1325)).
+
+Bundles:
+
+The Terraform provider upgrade to v1.48.0 includes a fix for library order not being respected.
+
+ * Fix conditional in query in `default-sql` template ([#1479](https://github.com/databricks/cli/pull/1479)).
+ * Remove user credentials specified in the Git origin URL ([#1494](https://github.com/databricks/cli/pull/1494)).
+ * Serialize dynamic value for `bundle validate` output ([#1499](https://github.com/databricks/cli/pull/1499)).
+ * Override variables with lookup value even if the variable has a default value set ([#1504](https://github.com/databricks/cli/pull/1504)).
+ * Pause quality monitors when "mode: development" is used ([#1481](https://github.com/databricks/cli/pull/1481)).
+ * Return `fs.ModeDir` for Git folders in the workspace ([#1521](https://github.com/databricks/cli/pull/1521)).
+ * Upgrade TF provider to 1.48.0 ([#1527](https://github.com/databricks/cli/pull/1527)).
+ * Added support for complex variables ([#1467](https://github.com/databricks/cli/pull/1467)).
+
+Internal:
+ * Add randIntn function ([#1475](https://github.com/databricks/cli/pull/1475)).
+ * Avoid multiple file tree traversals on bundle deploy ([#1493](https://github.com/databricks/cli/pull/1493)).
+ * Clean up unused code ([#1502](https://github.com/databricks/cli/pull/1502)).
+ * Use `dyn.InvalidValue` to indicate absence ([#1507](https://github.com/databricks/cli/pull/1507)).
+ * Add ApplyPythonMutator ([#1430](https://github.com/databricks/cli/pull/1430)).
+ * Set bool pointer to disable lock ([#1516](https://github.com/databricks/cli/pull/1516)).
+ * Allow the any type to be set to nil in `convert.FromTyped` ([#1518](https://github.com/databricks/cli/pull/1518)).
+ * Properly deal with nil values in `convert.FromTyped` ([#1511](https://github.com/databricks/cli/pull/1511)).
+ * Return `dyn.InvalidValue` instead of `dyn.NilValue` when errors happen ([#1514](https://github.com/databricks/cli/pull/1514)).
+ * PythonMutator: replace stdin/stdout with files ([#1512](https://github.com/databricks/cli/pull/1512)).
+ * Add context type and value to path rewriting ([#1525](https://github.com/databricks/cli/pull/1525)).
+
+API Changes:
+ * Added schedule CRUD commands to `databricks lakeview`.
+ * Added subscription CRUD commands to `databricks lakeview`.
+ * Added `databricks apps start` command.
+
+OpenAPI commit 7437dabb9dadee402c1fc060df4c1ce8cc5369f0 (2024-06-24)
+
+Dependency updates:
+ * Bump golang.org/x/text from 0.15.0 to 0.16.0 ([#1482](https://github.com/databricks/cli/pull/1482)).
+ * Bump golang.org/x/term from 0.20.0 to 0.21.0 ([#1483](https://github.com/databricks/cli/pull/1483)).
+ * Bump golang.org/x/mod from 0.17.0 to 0.18.0 ([#1484](https://github.com/databricks/cli/pull/1484)).
+ * Bump golang.org/x/oauth2 from 0.20.0 to 0.21.0 ([#1485](https://github.com/databricks/cli/pull/1485)).
+ * Bump github.com/briandowns/spinner from 1.23.0 to 1.23.1 ([#1495](https://github.com/databricks/cli/pull/1495)).
+ * Bump github.com/spf13/cobra from 1.8.0 to 1.8.1 ([#1496](https://github.com/databricks/cli/pull/1496)).
+ * Bump github.com/databricks/databricks-sdk-go from 0.42.0 to 0.43.0 ([#1522](https://github.com/databricks/cli/pull/1522)).
+
 ## 0.221.1
 
 Bundles:

From dba6164a4ce5dd45b45f174f804f982db6ca94f0 Mon Sep 17 00:00:00 2001
From: Gleb Kanterov
Date: Thu, 27 Jun 2024 11:47:58 +0200
Subject: [PATCH 8/9] merge.Override: Fix handling of dyn.NilValue (#1530)

## Changes
Fix handling of `dyn.NilValue` in `merge.Override` when the `dyn.Value`
carries a location.

## Tests
Unit tests

---
 libs/dyn/merge/override.go      | 8 +++-----
 libs/dyn/merge/override_test.go | 6 +++---
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/libs/dyn/merge/override.go b/libs/dyn/merge/override.go
index 97e8f100..81bbaa4d 100644
--- a/libs/dyn/merge/override.go
+++ b/libs/dyn/merge/override.go
@@ -30,10 +30,6 @@ func Override(leftRoot dyn.Value, rightRoot dyn.Value, visitor OverrideVisitor)
 }
 
 func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor OverrideVisitor) (dyn.Value, error) {
-	if left == dyn.NilValue && right == dyn.NilValue {
-		return dyn.NilValue, nil
-	}
-
 	if left.Kind() != right.Kind() {
 		return visitor.VisitUpdate(basePath, left, right)
 	}
@@ -98,9 +94,11 @@ func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor Overri
 		} else {
 			return visitor.VisitUpdate(basePath, left, right)
 		}
+	case dyn.KindNil:
+		return left, nil
 	}
 
-	return dyn.InvalidValue, fmt.Errorf("unexpected kind %s", left.Kind())
+	return dyn.InvalidValue, fmt.Errorf("unexpected kind %s at %s", left.Kind(), basePath.String())
 }
 
 func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dyn.Mapping, visitor OverrideVisitor) (dyn.Mapping, error) {
diff --git a/libs/dyn/merge/override_test.go b/libs/dyn/merge/override_test.go
index a34f2342..d8fd4e17 100644
--- a/libs/dyn/merge/override_test.go
+++ b/libs/dyn/merge/override_test.go
@@ -330,9 +330,9 @@ func TestOverride_Primitive(t *testing.T) {
 		{
 			name:     "nil (not updated)",
 			state:    visitorState{},
-			left:     dyn.NilValue,
-			right:    dyn.NilValue,
-			expected: dyn.NilValue,
+			left:     dyn.NilValue.WithLocation(leftLocation),
+			right:    dyn.NilValue.WithLocation(rightLocation),
+			expected: dyn.NilValue.WithLocation(leftLocation),
 		},
 		{
 			name: "nil (updated)",

From 4d8eba04cd008120ea544afcd03c54c05cf2e5dd Mon Sep 17 00:00:00 2001
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com>
Date: Thu, 27 Jun 2024 18:58:19 +0530
Subject: [PATCH 9/9] Compare `.Kind()` instead of direct equality checks on a `dyn.Value` (#1520)

## Changes
This PR makes two changes:

1. In https://github.com/databricks/cli/pull/1510 we'll be associating
multiple pieces of location metadata with a `dyn.Value`. The Go compiler
does not allow `==` on structs that contain slice fields, since slices
have no built-in notion of equality. In anticipation of adding a
`[]dyn.Location` field to `dyn.Value`, this PR removes all direct
comparisons of `dyn.Value` and relies on the kind instead.

2. Retain location metadata for values in `convert.FromTyped`. The change
diff is exactly the same as https://github.com/databricks/cli/pull/1523.
It's been combined with this PR because the two changes depend on each
other: applying either one alone causes test failures (a test-failure
deadlock).
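To illustrate the first point, here is a minimal, self-contained sketch.
`Kind`, `Location`, and `Value` below are simplified stand-ins for the real
`libs/dyn` types, not their actual definitions: once a struct carries a
slice field, Go rejects `==` on it at compile time, while a kind accessor
keeps working.

```go
package main

import "fmt"

// Simplified stand-ins for the libs/dyn types; not the real definitions.
type Kind int

const (
	KindInvalid Kind = iota
	KindNil
)

type Location struct {
	File string
}

// Once Value grows a slice-typed field such as locations, Go forbids
// comparing two Values with == (slices have no built-in equality).
type Value struct {
	kind      Kind
	locations []Location
}

func (v Value) Kind() Kind { return v.kind }

func main() {
	v := Value{kind: KindNil, locations: []Location{{File: "a.yml"}}}

	// fmt.Println(v == Value{kind: KindNil})
	// ^ compile error: struct containing []Location cannot be compared

	// Comparing kinds keeps working and ignores attached metadata.
	fmt.Println(v.Kind() == KindNil) // true
}
```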
Go patch used: ``` @@ var x expression @@ -x == dyn.InvalidValue +x.Kind() == dyn.KindInvalid @@ var x expression @@ -x != dyn.InvalidValue +x.Kind() != dyn.KindInvalid @@ var x expression @@ -x == dyn.NilValue +x.Kind() == dyn.KindNil @@ var x expression @@ -x != dyn.NilValue +x.Kind() != dyn.KindNil ``` ## Tests Unit tests and integration tests pass. --- bundle/config/mutator/environments_compat.go | 4 +- bundle/config/mutator/merge_job_clusters.go | 2 +- bundle/config/mutator/merge_job_tasks.go | 2 +- .../config/mutator/merge_pipeline_clusters.go | 2 +- bundle/config/mutator/run_as.go | 19 +-- bundle/config/root.go | 18 +-- .../{ => empty_run_as}/databricks.yml | 0 .../empty_sp/databricks.yml | 5 + .../empty_user/databricks.yml | 5 + .../empty_user_and_sp/databricks.yml | 6 + .../override}/databricks.yml | 0 .../override}/override.yml | 0 bundle/tests/run_as_test.go | 58 +++++++--- libs/dyn/convert/from_typed.go | 38 +++--- libs/dyn/convert/from_typed_test.go | 109 ++++++++++++++---- libs/dyn/convert/to_typed.go | 2 +- 16 files changed, 192 insertions(+), 78 deletions(-) rename bundle/tests/run_as/not_allowed/neither_sp_nor_user/{ => empty_run_as}/databricks.yml (100%) create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml create mode 100644 bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml rename bundle/tests/run_as/not_allowed/{neither_sp_nor_user_override => neither_sp_nor_user/override}/databricks.yml (100%) rename bundle/tests/run_as/not_allowed/{neither_sp_nor_user_override => neither_sp_nor_user/override}/override.yml (100%) diff --git a/bundle/config/mutator/environments_compat.go b/bundle/config/mutator/environments_compat.go index 053fd2e3..fb898ede 100644 --- a/bundle/config/mutator/environments_compat.go +++ b/bundle/config/mutator/environments_compat.go @@ -32,7 +32,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) dia targets := v.Get("targets") // Return an error if both "environments" and "targets" are set. - if environments != dyn.InvalidValue && targets != dyn.InvalidValue { + if environments.Kind() != dyn.KindInvalid && targets.Kind() != dyn.KindInvalid { return dyn.InvalidValue, fmt.Errorf( "both 'environments' and 'targets' are specified; only 'targets' should be used: %s", environments.Location().String(), @@ -40,7 +40,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) dia } // Rewrite "environments" to "targets". 
- if environments != dyn.InvalidValue && targets == dyn.InvalidValue { + if environments.Kind() != dyn.KindInvalid && targets.Kind() == dyn.KindInvalid { nv, err := dyn.Set(v, "targets", environments) if err != nil { return dyn.InvalidValue, err diff --git a/bundle/config/mutator/merge_job_clusters.go b/bundle/config/mutator/merge_job_clusters.go index ec615460..aa131f28 100644 --- a/bundle/config/mutator/merge_job_clusters.go +++ b/bundle/config/mutator/merge_job_clusters.go @@ -32,7 +32,7 @@ func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string { func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { - if v == dyn.NilValue { + if v.Kind() == dyn.KindNil { return v, nil } diff --git a/bundle/config/mutator/merge_job_tasks.go b/bundle/config/mutator/merge_job_tasks.go index f9a9bf71..9498e882 100644 --- a/bundle/config/mutator/merge_job_tasks.go +++ b/bundle/config/mutator/merge_job_tasks.go @@ -32,7 +32,7 @@ func (m *mergeJobTasks) taskKeyString(v dyn.Value) string { func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { - if v == dyn.NilValue { + if v.Kind() == dyn.KindNil { return v, nil } diff --git a/bundle/config/mutator/merge_pipeline_clusters.go b/bundle/config/mutator/merge_pipeline_clusters.go index c75f6532..52f3e6fa 100644 --- a/bundle/config/mutator/merge_pipeline_clusters.go +++ b/bundle/config/mutator/merge_pipeline_clusters.go @@ -35,7 +35,7 @@ func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string { func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { - if v == dyn.NilValue { + if v.Kind() == dyn.KindNil { return v, nil } diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index aecd1d17..d344a988 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -53,14 +53,20 @@ func (e errBothSpAndUserSpecified) Error() string { } func validateRunAs(b *bundle.Bundle) error { - runAs := b.Config.RunAs - - // Error if neither service_principal_name nor user_name are specified - if runAs.ServicePrincipalName == "" && runAs.UserName == "" { - return fmt.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as")) + neitherSpecifiedErr := fmt.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as")) + // Error if neither service_principal_name nor user_name are specified, but the + // run_as section is present. + if b.Config.Value().Get("run_as").Kind() == dyn.KindNil { + return neitherSpecifiedErr + } + // Error if one or both of service_principal_name and user_name are specified, + // but with empty values. 
+ if b.Config.RunAs.ServicePrincipalName == "" && b.Config.RunAs.UserName == "" { + return neitherSpecifiedErr } // Error if both service_principal_name and user_name are specified + runAs := b.Config.RunAs if runAs.UserName != "" && runAs.ServicePrincipalName != "" { return errBothSpAndUserSpecified{ spName: runAs.ServicePrincipalName, @@ -163,8 +169,7 @@ func setPipelineOwnersToRunAsIdentity(b *bundle.Bundle) { func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics { // Mutator is a no-op if run_as is not specified in the bundle - runAs := b.Config.RunAs - if runAs == nil { + if b.Config.Value().Get("run_as").Kind() == dyn.KindInvalid { return nil } diff --git a/bundle/config/root.go b/bundle/config/root.go index 0def1167..60faba29 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -346,7 +346,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Merge `run_as`. This field must be overwritten if set, not merged. - if v := target.Get("run_as"); v != dyn.InvalidValue { + if v := target.Get("run_as"); v.Kind() != dyn.KindInvalid { root, err = dyn.Set(root, "run_as", v) if err != nil { return err @@ -354,7 +354,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Below, we're setting fields on the bundle key, so make sure it exists. - if root.Get("bundle") == dyn.InvalidValue { + if root.Get("bundle").Kind() == dyn.KindInvalid { root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})) if err != nil { return err @@ -362,7 +362,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Merge `mode`. This field must be overwritten if set, not merged. - if v := target.Get("mode"); v != dyn.InvalidValue { + if v := target.Get("mode"); v.Kind() != dyn.KindInvalid { root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v) if err != nil { return err @@ -370,7 +370,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Merge `compute_id`. This field must be overwritten if set, not merged. - if v := target.Get("compute_id"); v != dyn.InvalidValue { + if v := target.Get("compute_id"); v.Kind() != dyn.KindInvalid { root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v) if err != nil { return err @@ -378,7 +378,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // Merge `git`. - if v := target.Get("git"); v != dyn.InvalidValue { + if v := target.Get("git"); v.Kind() != dyn.KindInvalid { ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git"))) if err != nil { ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}) @@ -391,7 +391,7 @@ func (r *Root) MergeTargetOverrides(name string) error { } // If the branch was overridden, we need to clear the inferred flag. - if branch := v.Get("branch"); branch != dyn.InvalidValue { + if branch := v.Get("branch"); branch.Kind() != dyn.KindInvalid { out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{})) if err != nil { return err @@ -419,7 +419,7 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) { // For each target, rewrite the variables block. return dyn.Map(v, "targets", dyn.Foreach(func(_ dyn.Path, target dyn.Value) (dyn.Value, error) { // Confirm it has a variables block. 
- if target.Get("variables") == dyn.InvalidValue { + if target.Get("variables").Kind() == dyn.KindInvalid { return target, nil } @@ -464,7 +464,7 @@ func validateVariableOverrides(root, target dyn.Value) (err error) { var tv map[string]variable.Variable // Collect variables from the root. - if v := root.Get("variables"); v != dyn.InvalidValue { + if v := root.Get("variables"); v.Kind() != dyn.KindInvalid { err = convert.ToTyped(&rv, v) if err != nil { return fmt.Errorf("unable to collect variables from root: %w", err) @@ -472,7 +472,7 @@ func validateVariableOverrides(root, target dyn.Value) (err error) { } // Collect variables from the target. - if v := target.Get("variables"); v != dyn.InvalidValue { + if v := target.Get("variables"); v.Kind() != dyn.KindInvalid { err = convert.ToTyped(&tv, v) if err != nil { return fmt.Errorf("unable to collect variables from target: %w", err) diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_run_as/databricks.yml similarity index 100% rename from bundle/tests/run_as/not_allowed/neither_sp_nor_user/databricks.yml rename to bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_run_as/databricks.yml diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml new file mode 100644 index 00000000..be18f60e --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: "abc" + +run_as: + service_principal_name: "" diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml new file mode 100644 index 00000000..33c48cb5 --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: "abc" + +run_as: + user_name: "" diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml new file mode 100644 index 00000000..4b59dc91 --- /dev/null +++ b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml @@ -0,0 +1,6 @@ +bundle: + name: "abc" + +run_as: + service_principal_name: "" + user_name: "" diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/override/databricks.yml similarity index 100% rename from bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/databricks.yml rename to bundle/tests/run_as/not_allowed/neither_sp_nor_user/override/databricks.yml diff --git a/bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml b/bundle/tests/run_as/not_allowed/neither_sp_nor_user/override/override.yml similarity index 100% rename from bundle/tests/run_as/not_allowed/neither_sp_nor_user_override/override.yml rename to bundle/tests/run_as/not_allowed/neither_sp_nor_user/override/override.yml diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 5ad7a89a..6c07cc53 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -196,27 +196,53 @@ func TestRunAsErrorWhenBothUserAndSpSpecified(t *testing.T) { } func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) { - b := load(t, "./run_as/not_allowed/neither_sp_nor_user") + tcases := 
[]struct { + name string + err string + }{ + { + name: "empty_run_as", + err: fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:8", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_run_as/databricks.yml")), + }, + { + name: "empty_sp", + err: fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml")), + }, + { + name: "empty_user", + err: fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml")), + }, + { + name: "empty_user_and_sp", + err: fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml")), + }, + } - ctx := context.Background() - bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - b.Config.Workspace.CurrentUser = &config.User{ - User: &iam.User{ - UserName: "my_service_principal", - }, - } - return nil - }) + for _, tc := range tcases { + t.Run(tc.name, func(t *testing.T) { - diags := bundle.Apply(ctx, b, mutator.SetRunAs()) - err := diags.Error() + bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name) + b := load(t, bundlePath) - configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/databricks.yml") - assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:8", configPath)) + ctx := context.Background() + bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "my_service_principal", + }, + } + return nil + }) + + diags := bundle.Apply(ctx, b, mutator.SetRunAs()) + err := diags.Error() + assert.EqualError(t, err, tc.err) + }) + } } func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) { - b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user_override", "development") + b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user/override", "development") ctx := context.Background() bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { @@ -231,7 +257,7 @@ func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) { diags := bundle.Apply(ctx, b, mutator.SetRunAs()) err := diags.Error() - configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user_override/override.yml") + configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/override/override.yml") assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. 
Neither service_principal_name nor user_name is specified at %s:4:12", configPath)) } diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index 15c5b797..e8d321f6 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -42,7 +42,7 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, // Dereference pointer if necessary for srcv.Kind() == reflect.Pointer { if srcv.IsNil() { - return dyn.NilValue, nil + return dyn.NilValue.WithLocation(ref.Location()), nil } srcv = srcv.Elem() @@ -55,27 +55,35 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, } } + var v dyn.Value + var err error switch srcv.Kind() { case reflect.Struct: - return fromTypedStruct(srcv, ref, options...) + v, err = fromTypedStruct(srcv, ref, options...) case reflect.Map: - return fromTypedMap(srcv, ref) + v, err = fromTypedMap(srcv, ref) case reflect.Slice: - return fromTypedSlice(srcv, ref) + v, err = fromTypedSlice(srcv, ref) case reflect.String: - return fromTypedString(srcv, ref, options...) + v, err = fromTypedString(srcv, ref, options...) case reflect.Bool: - return fromTypedBool(srcv, ref, options...) + v, err = fromTypedBool(srcv, ref, options...) case reflect.Int, reflect.Int32, reflect.Int64: - return fromTypedInt(srcv, ref, options...) + v, err = fromTypedInt(srcv, ref, options...) case reflect.Float32, reflect.Float64: - return fromTypedFloat(srcv, ref, options...) + v, err = fromTypedFloat(srcv, ref, options...) case reflect.Invalid: // If the value is untyped and not set (e.g. any type with nil value), we return nil. - return dyn.NilValue, nil + v, err = dyn.NilValue, nil + default: + return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) } - return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind()) + // Ensure the location metadata is retained. + if err != nil { + return dyn.InvalidValue, err + } + return v.WithLocation(ref.Location()), err } func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { @@ -117,7 +125,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio } // Either if the key was set in the reference or the field is not zero-valued, we include it. - if ok || nv != dyn.NilValue { + if ok || nv.Kind() != dyn.KindNil { out.Set(refk, nv) } } @@ -127,7 +135,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio // 2. The reference is a map (i.e. the struct was and still is empty). // 3. The "includeZeroValues" option is set (i.e. the struct is a non-nil pointer). if out.Len() > 0 || ref.Kind() == dyn.KindMap || slices.Contains(options, includeZeroValues) { - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } // Otherwise, return nil. @@ -179,7 +187,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out.Set(refk, nv) } - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { @@ -206,7 +214,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { refv := ref.Index(i) // Use nil reference if there is no reference for this index. 
- if refv == dyn.InvalidValue { + if refv.Kind() == dyn.KindInvalid { refv = dyn.NilValue } @@ -219,7 +227,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { out[i] = nv } - return dyn.NewValue(out, ref.Location()), nil + return dyn.V(out), nil } func fromTypedString(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index ed0c11ca..9141a694 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -49,7 +49,7 @@ func TestFromTypedStructPointerZeroFields(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) - // For an initialized pointer with a nil reference we expect a nil. + // For an initialized pointer with a nil reference we expect an empty map. src = &Tmp{} nv, err = FromTyped(src, dyn.NilValue) require.NoError(t, err) @@ -103,7 +103,7 @@ func TestFromTypedStructSetFields(t *testing.T) { }), nv) } -func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedStructSetFieldsRetainLocation(t *testing.T) { type Tmp struct { Foo string `json:"foo"` Bar string `json:"bar"` @@ -122,11 +122,9 @@ func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their location. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - - // Assert bar lost its location (because it was overwritten). - assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) } func TestFromTypedStringMapWithZeroValue(t *testing.T) { @@ -354,7 +352,7 @@ func TestFromTypedMapNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { var src = map[string]string{ "foo": "bar", "bar": "qux", @@ -368,11 +366,9 @@ func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) { nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their locations. assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo")) - - // Assert bar lost its location (because it was overwritten). - assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar")) + assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar")) } func TestFromTypedMapFieldWithZeroValue(t *testing.T) { @@ -429,7 +425,7 @@ func TestFromTypedSliceNonEmpty(t *testing.T) { }), nv) } -func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { +func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { var src = []string{ "foo", "bar", @@ -437,17 +433,15 @@ func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) { ref := dyn.V([]dyn.Value{ dyn.NewValue("foo", dyn.Location{File: "foo"}), - dyn.NewValue("baz", dyn.Location{File: "baz"}), + dyn.NewValue("bar", dyn.Location{File: "bar"}), }) nv, err := FromTyped(src, ref) require.NoError(t, err) - // Assert foo has retained its location. + // Assert foo and bar have retained their locations. assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv.Index(0)) - - // Assert bar lost its location (because it was overwritten). 
- assert.Equal(t, dyn.NewValue("bar", dyn.Location{}), nv.Index(1)) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "bar"}), nv.Index(1)) } func TestFromTypedStringEmpty(t *testing.T) { @@ -482,12 +476,20 @@ func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V("new"), nv) } -func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) { - var src string = "foo" +func TestFromTypedStringRetainsLocations(t *testing.T) { var ref = dyn.NewValue("foo", dyn.Location{File: "foo"}) + + // case: value has not been changed + var src string = "foo" nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = "bar" + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv) } func TestFromTypedStringTypeError(t *testing.T) { @@ -529,12 +531,20 @@ func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(true), nv) } -func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) { - var src bool = true +func TestFromTypedBoolRetainsLocations(t *testing.T) { var ref = dyn.NewValue(true, dyn.Location{File: "foo"}) + + // case: value has not been changed + var src bool = true nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(true, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = false + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "foo"}), nv) } func TestFromTypedBoolVariableReference(t *testing.T) { @@ -584,12 +594,20 @@ func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(int64(1234)), nv) } -func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) { - var src int = 1234 +func TestFromTypedIntRetainsLocations(t *testing.T) { var ref = dyn.NewValue(1234, dyn.Location{File: "foo"}) + + // case: value has not been changed + var src int = 1234 nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1234, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = 1235 + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(int64(1235), dyn.Location{File: "foo"}), nv) } func TestFromTypedIntVariableReference(t *testing.T) { @@ -639,12 +657,21 @@ func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { assert.Equal(t, dyn.V(1.23), nv) } -func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) { - var src float64 = 1.23 +func TestFromTypedFloatRetainsLocations(t *testing.T) { + var src float64 var ref = dyn.NewValue(1.23, dyn.Location{File: "foo"}) + + // case: value has not been changed + src = 1.23 nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NewValue(1.23, dyn.Location{File: "foo"}), nv) + + // case: value has been changed + src = 1.24 + nv, err = FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(1.24, dyn.Location{File: "foo"}), nv) } func TestFromTypedFloatVariableReference(t *testing.T) { @@ -705,3 +732,35 @@ func TestFromTypedAnyNil(t *testing.T) { require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) } + +func TestFromTypedNilPointerRetainsLocations(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + Bar string `json:"bar"` + } + + var src *Tmp + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, 
err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} + +func TestFromTypedNilMapRetainsLocation(t *testing.T) { + var src map[string]string + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} + +func TestFromTypedNilSliceRetainsLocation(t *testing.T) { + var src []string + ref := dyn.NewValue(nil, dyn.Location{File: "foobar"}) + + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv) +} diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index 91d6445a..8febe87a 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -16,7 +16,7 @@ func ToTyped(dst any, src dyn.Value) error { for dstv.Kind() == reflect.Pointer { // If the source value is nil and the destination is a settable pointer, // set the destination to nil. Also see `end_to_end_test.go`. - if dstv.CanSet() && src == dyn.NilValue { + if dstv.CanSet() && src.Kind() == dyn.KindNil { dstv.SetZero() return nil }
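
To close out, a hedged usage sketch of the variable-reference handling added
earlier in this series. It assumes the `dyn.V` and `convert.ToTyped` APIs as
exercised by the tests above; the expected output is inferred from the
`to_typed.go` changes rather than taken from an actual run.

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

func main() {
	var dst map[string]string

	// A pure variable reference cannot be materialized into a typed map yet.
	// Per the KindString cases added to to_typed.go above, ToTyped zeroes the
	// destination instead of returning a type error, deferring resolution.
	err := convert.ToTyped(&dst, dyn.V("${var.foo}"))
	fmt.Println(err == nil, dst == nil) // expected: true true
}
```

With this behavior, a value like `${var.foo}` survives typed conversion
untouched (the destination is simply zeroed) so that variable resolution can
run in a later phase.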