Merge branch 'main' into cp-summary-with-urls

Pieter Noordhuis 2024-09-13 12:34:59 +02:00 committed by GitHub
commit 44110d1a58
120 changed files with 8308 additions and 10183 deletions

@@ -11,10 +11,10 @@
  "toolchain": {
    "required": ["go"],
    "post_generate": [
-      "go run ./bundle/internal/bundle/schema/main.go ./bundle/schema/docs/bundle_descriptions.json",
+      "go run ./bundle/internal/schema/*.go ./bundle/schema/jsonschema.json",
      "echo 'bundle/internal/tf/schema/\*.go linguist-generated=true' >> ./.gitattributes",
      "echo 'go.sum linguist-generated=true' >> ./.gitattributes",
-      "echo 'bundle/schema/docs/bundle_descriptions.json linguist-generated=true' >> ./.gitattributes"
+      "echo 'bundle/schema/jsonschema.json linguist-generated=true' >> ./.gitattributes"
    ]
  }
}

@@ -1 +1 @@
-3eae49b444cac5a0118a3503e5b7ecef7f96527a
+d05898328669a3f8ab0c2ecee37db2673d3ea3f7

@@ -116,6 +116,10 @@ func allResolvers() *resolvers {
    {{range .Services -}}
    {{- if in $allowlist .KebabName -}}
    r.{{.Singular.PascalName}} = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["{{.Singular.PascalName}}"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.{{.PascalName}}.GetBy{{range .NamedIdMap.NamePath}}{{.PascalName}}{{end}}(ctx, name)
        if err != nil {
            return "", err

.gitattributes
@@ -120,4 +120,4 @@ cmd/workspace/workspace-conf/workspace-conf.go linguist-generated=true
cmd/workspace/workspace/workspace.go linguist-generated=true
bundle/internal/tf/schema/\*.go linguist-generated=true
go.sum linguist-generated=true
-bundle/schema/docs/bundle_descriptions.json linguist-generated=true
+bundle/schema/jsonschema.json linguist-generated=true

@@ -107,11 +107,18 @@ jobs:
        run: npm install -g ajv-cli@5.0.0

      # Assert that the generated bundle schema is a valid JSON schema by using
-      # ajv-cli to validate it against a sample configuration file.
+      # ajv-cli to validate it against bundle configuration files.
      # By default the ajv-cli runs in strict mode which will fail if the schema
      # itself is not valid. Strict mode is more strict than the JSON schema
      # specification. See for details: https://ajv.js.org/options.html#strict-mode-options
      - name: Validate bundle schema
        run: |
          go run main.go bundle schema > schema.json
-          ajv -s schema.json -d ./bundle/tests/basic/databricks.yml
+
+          for file in ./bundle/internal/schema/testdata/pass/*.yml; do
+            ajv test -s schema.json -d $file --valid
+          done
+
+          for file in ./bundle/internal/schema/testdata/fail/*.yml; do
+            ajv test -s schema.json -d $file --invalid
+          done

@@ -1,5 +1,34 @@
# Version changelog

+## [Release] Release v0.228.0
+
+CLI:
+ * Do not error if we cannot prompt for a profile in `auth login` ([#1745](https://github.com/databricks/cli/pull/1745)).
+
+Bundles:
+
+As of this release, the CLI will show a prompt if there are configuration changes that lead to DLT pipeline recreation.
+Users can skip the prompt by specifying the `--auto-approve` flag.
+
+ * Pass along $AZURE_CONFIG_FILE to Terraform process ([#1734](https://github.com/databricks/cli/pull/1734)).
+ * Add prompt when a pipeline recreation happens ([#1672](https://github.com/databricks/cli/pull/1672)).
+ * Use materialized views in the default-sql template ([#1709](https://github.com/databricks/cli/pull/1709)).
+ * Update templates to latest LTS DBR ([#1715](https://github.com/databricks/cli/pull/1715)).
+ * Make lock optional in the JSON schema ([#1738](https://github.com/databricks/cli/pull/1738)).
+ * Do not suppress normalisation diagnostics for resolving variables ([#1740](https://github.com/databricks/cli/pull/1740)).
+ * Include a permissions section in all templates ([#1713](https://github.com/databricks/cli/pull/1713)).
+ * Fixed complex variables are not being correctly merged from include files ([#1746](https://github.com/databricks/cli/pull/1746)).
+ * Fixed variable override in target with full variable syntax ([#1749](https://github.com/databricks/cli/pull/1749)).
+
+Internal:
+ * Consider serverless clusters as compatible for Python wheel tasks ([#1733](https://github.com/databricks/cli/pull/1733)).
+ * PythonMutator: explain missing package error ([#1736](https://github.com/databricks/cli/pull/1736)).
+ * Add `dyn.Time` to box a timestamp with its original string value ([#1732](https://github.com/databricks/cli/pull/1732)).
+ * Fix streaming of stdout, stdin, stderr in cobra test runner ([#1742](https://github.com/databricks/cli/pull/1742)).
+
+Dependency updates:
+ * Bump github.com/Masterminds/semver/v3 from 3.2.1 to 3.3.0 ([#1741](https://github.com/databricks/cli/pull/1741)).
+
## [Release] Release v0.227.1

CLI:

@@ -33,12 +33,7 @@ func createGlobError(v dyn.Value, p dyn.Path, message string) diag.Diagnostic {
        Severity:  diag.Error,
        Summary:   fmt.Sprintf("%s: %s", source, message),
        Locations: []dyn.Location{v.Location()},
-        Paths: []dyn.Path{
-            // Hack to clone the path. This path copy is mutable.
-            // To be addressed in a later PR.
-            p.Append(),
-        },
+        Paths:     []dyn.Path{p},
    }
}

@@ -6,5 +6,5 @@ type Deployment struct {
    FailOnActiveRuns bool `json:"fail_on_active_runs,omitempty"`

    // Lock configures locking behavior on deployment.
-    Lock Lock `json:"lock"`
+    Lock Lock `json:"lock,omitempty"`
}

@@ -25,6 +25,20 @@ func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) {
        value["tasks"] = dyn.NewValue(tasks, []dyn.Location{{Line: jobOrder.Get("tasks")}})
    }

+    // We're processing job.Settings.Parameters separately to retain empty default values.
+    if len(job.Settings.Parameters) > 0 {
+        params := make([]dyn.Value, 0)
+        for _, parameter := range job.Settings.Parameters {
+            p := map[string]dyn.Value{
+                "name":    dyn.NewValue(parameter.Name, []dyn.Location{{Line: 0}}), // We use Line: 0 to ensure that the name goes first.
+                "default": dyn.NewValue(parameter.Default, []dyn.Location{{Line: 1}}),
+            }
+            params = append(params, dyn.NewValue(p, []dyn.Location{}))
+        }
+
+        value["parameters"] = dyn.NewValue(params, []dyn.Location{{Line: jobOrder.Get("parameters")}})
+    }
+
    return yamlsaver.ConvertToMapValue(job.Settings, jobOrder, []string{"format", "new_cluster", "existing_cluster_id"}, value)
}

@@ -1,15 +1,21 @@
package python

import (
+    "bytes"
    "context"
    "encoding/json"
    "errors"
    "fmt"
+    "io"
    "os"
    "path/filepath"
+    "strings"

-    "github.com/databricks/cli/libs/python"
    "github.com/databricks/databricks-sdk-go/logger"
+    "github.com/fatih/color"
+
+    "github.com/databricks/cli/libs/python"

    "github.com/databricks/cli/bundle/env"

@@ -169,7 +175,11 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
        return dyn.InvalidValue, diag.Errorf("failed to write input file: %s", err)
    }

-    stderrWriter := newLogWriter(ctx, "stderr: ")
+    stderrBuf := bytes.Buffer{}
+    stderrWriter := io.MultiWriter(
+        newLogWriter(ctx, "stderr: "),
+        &stderrBuf,
+    )
    stdoutWriter := newLogWriter(ctx, "stdout: ")

    _, processErr := process.Background(

@@ -197,7 +207,13 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
    // process can fail without reporting errors in diagnostics file or creating it, for instance,
    // venv doesn't have PyDABs library installed
    if processErr != nil {
-        return dyn.InvalidValue, diag.Errorf("python mutator process failed: %sw, use --debug to enable logging", processErr)
+        diagnostic := diag.Diagnostic{
+            Severity: diag.Error,
+            Summary:  fmt.Sprintf("python mutator process failed: %q, use --debug to enable logging", processErr),
+            Detail:   explainProcessErr(stderrBuf.String()),
+        }
+
+        return dyn.InvalidValue, diag.Diagnostics{diagnostic}
    }

    // or we can fail to read diagnostics file, that should always be created

@@ -205,15 +221,40 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, r
        return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
    }

-    output, err := loadOutputFile(rootPath, outputPath)
-    if err != nil {
-        return dyn.InvalidValue, diag.Errorf("failed to load Python mutator output: %s", err)
-    }
+    output, outputDiags := loadOutputFile(rootPath, outputPath)
+    pythonDiagnostics = pythonDiagnostics.Extend(outputDiags)

    // we pass through pythonDiagnostic because it contains warnings
    return output, pythonDiagnostics
}

+const installExplanation = `If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
+and that the wheel is installed in the Python environment:
+
+  $ .venv/bin/pip install -e .
+
+If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
+or activate the environment before running CLI commands:
+
+  experimental:
+    pydabs:
+      venv_path: .venv
+`
+
+// explainProcessErr provides additional explanation for common errors.
+// It's meant to be the best effort, and not all errors are covered.
+// Output should only be used for error reporting.
+func explainProcessErr(stderr string) string {
+    // implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
+    if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
+        summary := color.CyanString("Explanation: ") + "'databricks-pydabs' library is not installed in the Python environment.\n"
+
+        return stderr + "\n" + summary + "\n" + installExplanation
+    }
+
+    return stderr
+}
+
func writeInputFile(inputPath string, input dyn.Value) error {
    // we need to marshal dyn.Value instead of bundle.Config to JSON to support
    // non-string fields assigned with bundle variables

@@ -225,10 +266,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
    return os.WriteFile(inputPath, rootConfigJson, 0600)
}

-func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
+func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
    outputFile, err := os.Open(outputPath)
    if err != nil {
-        return dyn.InvalidValue, fmt.Errorf("failed to open output file: %w", err)
+        return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
    }

    defer outputFile.Close()

@@ -243,27 +284,34 @@ func loadOutputFile(rootPath string, outputPath string) (dyn.Value, error) {
    // for that, we pass virtualPath instead of outputPath as file location
    virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
    if err != nil {
-        return dyn.InvalidValue, fmt.Errorf("failed to get absolute path: %w", err)
+        return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
    }

    generated, err := yamlloader.LoadYAML(virtualPath, outputFile)
    if err != nil {
-        return dyn.InvalidValue, fmt.Errorf("failed to parse output file: %w", err)
+        return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse output file: %w", err))
    }

-    normalized, diagnostic := convert.Normalize(config.Root{}, generated)
-    if diagnostic.Error() != nil {
-        return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %w", diagnostic.Error())
-    }
+    return strictNormalize(config.Root{}, generated)
+}
+
+func strictNormalize(dst any, generated dyn.Value) (dyn.Value, diag.Diagnostics) {
+    normalized, diags := convert.Normalize(dst, generated)

    // warnings shouldn't happen because output should be already normalized
    // when it happens, it's a bug in the mutator, and should be treated as an error
-    for _, d := range diagnostic.Filter(diag.Warning) {
-        return dyn.InvalidValue, fmt.Errorf("failed to normalize output: %s", d.Summary)
+    strictDiags := diag.Diagnostics{}
+    for _, d := range diags {
+        if d.Severity == diag.Warning {
+            d.Severity = diag.Error
+        }
+
+        strictDiags = strictDiags.Append(d)
    }

-    return normalized, nil
+    return normalized, strictDiags
}

// loadDiagnosticsFile loads diagnostics from a file.

@@ -10,6 +10,8 @@ import (
    "runtime"
    "testing"

+    "github.com/databricks/cli/libs/dyn/convert"
+
    "github.com/databricks/cli/libs/dyn/merge"

    "github.com/databricks/cli/bundle/env"

@@ -255,7 +257,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
    mutator := PythonMutator(PythonMutatorPhaseLoad)
    diag := bundle.Apply(ctx, b, mutator)

-    assert.EqualError(t, diag.Error(), "failed to load Python mutator output: failed to normalize output: unknown field: unknown_property")
+    assert.EqualError(t, diag.Error(), "unknown field: unknown_property")
}

func TestPythonMutator_disabled(t *testing.T) {

@@ -546,6 +548,46 @@ func TestInterpreterPath(t *testing.T) {
    }
}

+func TestStrictNormalize(t *testing.T) {
+    // NB: there is no way to trigger diag.Error, so we don't test it
+    type TestStruct struct {
+        A int `json:"a"`
+    }
+
+    value := dyn.NewValue(map[string]dyn.Value{"A": dyn.NewValue("abc", nil)}, nil)
+
+    _, diags := convert.Normalize(TestStruct{}, value)
+    _, strictDiags := strictNormalize(TestStruct{}, value)
+
+    assert.False(t, diags.HasError())
+    assert.True(t, strictDiags.HasError())
+}
+
+func TestExplainProcessErr(t *testing.T) {
+    stderr := "/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')\n"
+    expected := `/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')
+
+Explanation: 'databricks-pydabs' library is not installed in the Python environment.
+
+If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
+and that the wheel is installed in the Python environment:
+
+  $ .venv/bin/pip install -e .
+
+If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
+or activate the environment before running CLI commands:
+
+  experimental:
+    pydabs:
+      venv_path: .venv
+`
+
+    out := explainProcessErr(stderr)
+
+    assert.Equal(t, expected, out)
+}
+
func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
    ctx := context.Background()
    ctx, stub := process.WithStub(ctx)

@@ -2,7 +2,6 @@ package mutator

import (
    "context"
-    "fmt"
    "testing"

    "github.com/databricks/cli/bundle"

@@ -44,11 +43,13 @@ func TestResolveClusterReference(t *testing.T) {
    m := mocks.NewMockWorkspaceClient(t)
    b.SetWorkpaceClient(m.WorkspaceClient)
    clusterApi := m.GetMockClustersAPI()
-    clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef1).Return(&compute.ClusterDetails{
-        ClusterId: "1234-5678-abcd",
-    }, nil)
-    clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef2).Return(&compute.ClusterDetails{
-        ClusterId: "9876-5432-xywz",
-    }, nil)
+    clusterApi.EXPECT().ListAll(mock.Anything, compute.ListClustersRequest{
+        FilterBy: &compute.ListClustersFilterBy{
+            ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
+        },
+    }).Return([]compute.ClusterDetails{
+        {ClusterId: "1234-5678-abcd", ClusterName: clusterRef1},
+        {ClusterId: "9876-5432-xywz", ClusterName: clusterRef2},
+    }, nil)

    diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())

@@ -78,10 +79,16 @@ func TestResolveNonExistentClusterReference(t *testing.T) {
    m := mocks.NewMockWorkspaceClient(t)
    b.SetWorkpaceClient(m.WorkspaceClient)
    clusterApi := m.GetMockClustersAPI()
-    clusterApi.EXPECT().GetByClusterName(mock.Anything, clusterRef).Return(nil, fmt.Errorf("ClusterDetails named '%s' does not exist", clusterRef))
+    clusterApi.EXPECT().ListAll(mock.Anything, compute.ListClustersRequest{
+        FilterBy: &compute.ListClustersFilterBy{
+            ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
+        },
+    }).Return([]compute.ClusterDetails{
+        {ClusterId: "1234-5678-abcd", ClusterName: "some other cluster"},
+    }, nil)

    diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
-    require.ErrorContains(t, diags.Error(), "failed to resolve cluster: Random, err: ClusterDetails named 'Random' does not exist")
+    require.ErrorContains(t, diags.Error(), "failed to resolve cluster: Random, err: cluster named 'Random' does not exist")
}

func TestNoLookupIfVariableIsSet(t *testing.T) {

@@ -158,8 +165,14 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
    m := mocks.NewMockWorkspaceClient(t)
    b.SetWorkpaceClient(m.WorkspaceClient)
    clusterApi := m.GetMockClustersAPI()
-    clusterApi.EXPECT().GetByClusterName(mock.Anything, "cluster-bar-dev").Return(&compute.ClusterDetails{
-        ClusterId: "1234-5678-abcd",
-    }, nil)
+    clusterApi.EXPECT().ListAll(mock.Anything, compute.ListClustersRequest{
+        FilterBy: &compute.ListClustersFilterBy{
+            ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
+        },
+    }).Return([]compute.ClusterDetails{
+        {ClusterId: "1234-5678-abcd", ClusterName: "cluster-bar-dev"},
+        {ClusterId: "9876-5432-xywz", ClusterName: "some other cluster"},
+    }, nil)

    diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))

@@ -10,7 +10,6 @@ import (
    "github.com/databricks/cli/libs/dyn"
    "github.com/databricks/cli/libs/dyn/convert"
    "github.com/databricks/cli/libs/dyn/dynvar"
-    "github.com/databricks/cli/libs/log"
)

type resolveVariableReferences struct {

@@ -124,6 +123,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
    // We rewrite it here to make the resolution logic simpler.
    varPath := dyn.NewPath(dyn.Key("var"))

+    var diags diag.Diagnostics
    err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
        // Synthesize a copy of the root that has all fields that are present in the type
        // but not set in the dynamic value set to their corresponding empty value.

@@ -180,14 +180,13 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
        // Normalize the result because variable resolution may have been applied to non-string fields.
        // For example, a variable reference may have been resolved to a integer.
-        root, diags := convert.Normalize(b.Config, root)
-        for _, diag := range diags {
-            // This occurs when a variable's resolved value is incompatible with the field's type.
-            // Log a warning until we have a better way to surface these diagnostics to the user.
-            log.Warnf(ctx, "normalization diagnostic: %s", diag.Summary)
-        }
+        root, normaliseDiags := convert.Normalize(b.Config, root)
+        diags = diags.Extend(normaliseDiags)
        return root, nil
    })

-    return diag.FromErr(err)
+    if err != nil {
+        diags = diags.Extend(diag.FromErr(err))
+    }
+
+    return diags
}

@@ -406,6 +406,30 @@ func (r *Root) MergeTargetOverrides(name string) error {
    return r.updateWithDynamicValue(root)
}

+var variableKeywords = []string{"default", "lookup"}
+
+// isFullVariableOverrideDef checks if the given value is a full syntax variable override.
+// A full syntax variable override is a map with only one of the following
+// keys: "default", "lookup".
+func isFullVariableOverrideDef(v dyn.Value) bool {
+    mv, ok := v.AsMap()
+    if !ok {
+        return false
+    }
+
+    if mv.Len() != 1 {
+        return false
+    }
+
+    for _, keyword := range variableKeywords {
+        if _, ok := mv.GetByString(keyword); ok {
+            return true
+        }
+    }
+
+    return false
+}
+
// rewriteShorthands performs lightweight rewriting of the configuration
// tree where we allow users to write a shorthand and must rewrite to the full form.
func rewriteShorthands(v dyn.Value) (dyn.Value, error) {

@@ -433,20 +457,27 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
            }, variable.Locations()), nil

        case dyn.KindMap, dyn.KindSequence:
+            // If it's a full variable definition, leave it as is.
+            if isFullVariableOverrideDef(variable) {
+                return variable, nil
+            }
+
            // Check if the original definition of variable has a type field.
+            // If it has a type field, it means the shorthand is a value of a complex type.
+            // Type might not be found if the variable is overridden in a separate file
+            // and configuration is not merged yet.
            typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type")))
-            if err != nil {
-                return variable, nil
-            }
-
-            if typeV.MustString() == "complex" {
+            if err == nil && typeV.MustString() == "complex" {
                return dyn.NewValue(map[string]dyn.Value{
                    "type":    typeV,
                    "default": variable,
                }, variable.Locations()), nil
            }

-            return variable, nil
+            // If it's a shorthand, rewrite it to a full variable definition.
+            return dyn.NewValue(map[string]dyn.Value{
+                "default": variable,
+            }, variable.Locations()), nil

        default:
            return variable, nil
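
A minimal sketch (not part of the change) of how the two override shapes behave with isFullVariableOverrideDef, assuming it lives in the same package and uses the dyn constructors shown above; the values are hypothetical.

func exampleOverrideShapes() (bool, bool) {
    // Full-syntax override: a map with exactly one of the "default"/"lookup" keys.
    full := dyn.NewValue(map[string]dyn.Value{
        "default": dyn.NewValue("value", nil),
    }, nil)

    // Shorthand override: a bare (possibly complex) value.
    shorthand := dyn.NewValue(map[string]dyn.Value{
        "key1": dyn.NewValue("value1", nil),
    }, nil)

    // true: rewriteShorthands leaves the full form as is.
    // false: the shorthand is wrapped into {"default": <value>}.
    return isFullVariableOverrideDef(full), isFullVariableOverrideDef(shorthand)
}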

@@ -139,7 +139,7 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) {
        },
        Targets: map[string]*Target{
            "development": {
-                Variables: map[string]*variable.Variable{
+                Variables: map[string]*variable.TargetVariable{
                    "foo": {
                        Default:     "bar",
                        Description: "wrong",

@@ -38,7 +38,26 @@ type Target struct {
    // Override default values or lookup name for defined variables
    // Does not permit defining new variables or redefining existing ones
    // in the scope of a target
-    Variables map[string]*variable.Variable `json:"variables,omitempty"`
+    //
+    // There are two valid ways to define a variable override in a target:
+    // 1. Direct value override. We normalize this to the variable.Variable
+    //    struct format when loading the configuration YAML:
+    //
+    //   variables:
+    //     foo: "value"
+    //
+    // 2. Override matching the variable.Variable struct.
+    //
+    //   variables:
+    //     foo:
+    //       default: "value"
+    //
+    //   OR
+    //
+    //   variables:
+    //     foo:
+    //       lookup: "resource_name"
+    Variables map[string]*variable.TargetVariable `json:"variables,omitempty"`

    Git Git `json:"git,omitempty"`

@@ -3,7 +3,6 @@ package validate
import (
    "context"
    "fmt"
-    "slices"
    "sort"

    "github.com/databricks/cli/bundle"

@@ -66,10 +65,7 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.D
            }
        }

-        // dyn.Path under the hood is a slice. The code that walks the configuration
-        // tree uses the same underlying slice to track the path as it walks
-        // the tree. So, we need to clone it here.
-        m.paths = append(m.paths, slices.Clone(p))
+        m.paths = append(m.paths, p)
        m.locations = append(m.locations, v.Locations()...)

        resourceMetadata[k] = m

@@ -220,6 +220,10 @@ type resolvers struct {
func allResolvers() *resolvers {
    r := &resolvers{}
    r.Alert = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Alert"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Alerts.GetByDisplayName(ctx, name)
        if err != nil {
            return "", err

@@ -228,6 +232,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.Id), nil
    }
    r.ClusterPolicy = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["ClusterPolicy"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.ClusterPolicies.GetByName(ctx, name)
        if err != nil {
            return "", err

@@ -236,6 +244,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.PolicyId), nil
    }
    r.Cluster = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Cluster"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Clusters.GetByClusterName(ctx, name)
        if err != nil {
            return "", err

@@ -244,6 +256,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.ClusterId), nil
    }
    r.Dashboard = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Dashboard"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Dashboards.GetByName(ctx, name)
        if err != nil {
            return "", err

@@ -252,6 +268,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.Id), nil
    }
    r.InstancePool = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["InstancePool"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.InstancePools.GetByInstancePoolName(ctx, name)
        if err != nil {
            return "", err

@@ -260,6 +280,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.InstancePoolId), nil
    }
    r.Job = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Job"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Jobs.GetBySettingsName(ctx, name)
        if err != nil {
            return "", err

@@ -268,6 +292,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.JobId), nil
    }
    r.Metastore = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Metastore"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Metastores.GetByName(ctx, name)
        if err != nil {
            return "", err

@@ -276,6 +304,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.MetastoreId), nil
    }
    r.Pipeline = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Pipeline"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Pipelines.GetByName(ctx, name)
        if err != nil {
            return "", err

@@ -284,6 +316,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.PipelineId), nil
    }
    r.Query = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Query"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Queries.GetByDisplayName(ctx, name)
        if err != nil {
            return "", err

@@ -292,6 +328,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.Id), nil
    }
    r.ServicePrincipal = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["ServicePrincipal"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.ServicePrincipals.GetByDisplayName(ctx, name)
        if err != nil {
            return "", err

@@ -300,6 +340,10 @@ func allResolvers() *resolvers {
        return fmt.Sprint(entity.ApplicationId), nil
    }
    r.Warehouse = func(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
+        fn, ok := lookupOverrides["Warehouse"]
+        if ok {
+            return fn(ctx, w, name)
+        }
        entity, err := w.Warehouses.GetByName(ctx, name)
        if err != nil {
            return "", err

@@ -0,0 +1,41 @@
package variable

import (
    "context"
    "fmt"

    "github.com/databricks/databricks-sdk-go"
    "github.com/databricks/databricks-sdk-go/service/compute"
)

var lookupOverrides = map[string]resolverFunc{
    "Cluster": resolveCluster,
}

// We added a custom resolver for the cluster to add filtering for the cluster source when we list all clusters.
// Without the filtering, listing could take a very long time (5-10 mins), which leads to lookup timeouts.
func resolveCluster(ctx context.Context, w *databricks.WorkspaceClient, name string) (string, error) {
    result, err := w.Clusters.ListAll(ctx, compute.ListClustersRequest{
        FilterBy: &compute.ListClustersFilterBy{
            ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
        },
    })
    if err != nil {
        return "", err
    }

    tmp := map[string][]compute.ClusterDetails{}
    for _, v := range result {
        key := v.ClusterName
        tmp[key] = append(tmp[key], v)
    }

    alternatives, ok := tmp[name]
    if !ok || len(alternatives) == 0 {
        return "", fmt.Errorf("cluster named '%s' does not exist", name)
    }

    if len(alternatives) > 1 {
        return "", fmt.Errorf("there are %d instances of clusters named '%s'", len(alternatives), name)
    }

    return alternatives[0].ClusterId, nil
}
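
A hypothetical sketch (not part of the change) of how this override is reached: the generated allResolvers() code above consults lookupOverrides before falling back to GetByClusterName, so a cluster lookup from within this package flows through resolveCluster. The client construction and cluster name here are illustrative only.

func exampleClusterLookup(ctx context.Context) (string, error) {
    w := databricks.Must(databricks.NewWorkspaceClient())

    // r.Cluster checks lookupOverrides["Cluster"] first, so this resolves the
    // name via ListAll with the ClusterSources filter instead of GetByClusterName.
    r := allResolvers()
    return r.Cluster(ctx, w, "my-cluster")
}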

@@ -16,6 +16,11 @@ const (
    VariableTypeComplex VariableType = "complex"
)

+// We alias it here to override the JSON schema associated with a variable value
+// in a target override. This is because we allow for directly specifying the value
+// in addition to the variable.Variable struct format in a target override.
+type TargetVariable Variable
+
// An input variable for the bundle config
type Variable struct {
    // A type of the variable. This is used to validate the value of the variable

@@ -69,6 +69,11 @@ func (m *importResource) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn
        // Remove output starting from Warning until end of output
        output = output[:bytes.Index([]byte(output), []byte("Warning:"))]
        cmdio.LogString(ctx, output)

+        if !cmdio.IsPromptSupported(ctx) {
+            return diag.Errorf("This bind operation requires user confirmation, but the current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed.")
+        }
+
        ans, err := cmdio.AskYesOrNo(ctx, "Confirm import changes? Changes will be remotely applied only after running 'bundle deploy'.")
        if err != nil {
            return diag.FromErr(err)

@@ -111,6 +111,13 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error {
        environ["PATH"] = path
    }

+    // Include $AZURE_CONFIG_FILE in set of environment variables to pass along.
+    // This is set in Azure DevOps by the AzureCLI@2 task.
+    azureConfigFile, ok := env.Lookup(ctx, "AZURE_CONFIG_FILE")
+    if ok {
+        environ["AZURE_CONFIG_FILE"] = azureConfigFile
+    }
+
    // Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
    // See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration
    devConfigFile, ok := env.Lookup(ctx, "TF_CLI_CONFIG_FILE")

@@ -269,19 +269,20 @@ func TestSetUserAgentExtraEnvVar(t *testing.T) {
}

func TestInheritEnvVars(t *testing.T) {
-    env := map[string]string{}
-
    t.Setenv("HOME", "/home/testuser")
    t.Setenv("PATH", "/foo:/bar")
    t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
+    t.Setenv("AZURE_CONFIG_FILE", "/tmp/foo/bar")

-    err := inheritEnvVars(context.Background(), env)
-
-    require.NoError(t, err)
-
-    require.Equal(t, env["HOME"], "/home/testuser")
-    require.Equal(t, env["PATH"], "/foo:/bar")
-    require.Equal(t, env["TF_CLI_CONFIG_FILE"], "/tmp/config.tfrc")
+    ctx := context.Background()
+    env := map[string]string{}
+    err := inheritEnvVars(ctx, env)
+    if assert.NoError(t, err) {
+        assert.Equal(t, "/home/testuser", env["HOME"])
+        assert.Equal(t, "/foo:/bar", env["PATH"])
+        assert.Equal(t, "/tmp/config.tfrc", env["TF_CLI_CONFIG_FILE"])
+        assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_FILE"])
+    }
}

func TestSetUserProfileFromInheritEnvVars(t *testing.T) {

@@ -1,42 +0,0 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"github.com/databricks/cli/bundle/schema"
)
func main() {
if len(os.Args) != 2 {
fmt.Println("Usage: go run main.go <output-file>")
os.Exit(1)
}
// Output file, to write the generated schema descriptions to.
outputFile := os.Args[1]
// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
if inputFile == "" {
log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
}
// Generate the schema descriptions.
docs, err := schema.UpdateBundleDescriptions(inputFile)
if err != nil {
log.Fatal(err)
}
result, err := json.MarshalIndent(docs, "", " ")
if err != nil {
log.Fatal(err)
}
// Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, result, 0644)
if err != nil {
log.Fatal(err)
}
}

@@ -0,0 +1,109 @@
package main
import (
"encoding/json"
"fmt"
"log"
"os"
"reflect"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/jsonschema"
)
func interpolationPattern(s string) string {
return fmt.Sprintf(`\$\{(%s(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)+)\}`, s)
}
func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
if typ == reflect.TypeOf(config.Root{}) || typ == reflect.TypeOf(variable.Variable{}) {
return s
}
// The variables block in a target override allows for directly specifying
// the value of the variable.
if typ == reflect.TypeOf(variable.TargetVariable{}) {
return jsonschema.Schema{
AnyOf: []jsonschema.Schema{
// We keep the original schema so that autocomplete suggestions
// continue to work.
s,
// All values are valid for a variable value, be it primitive types
// like string/bool or complex ones like objects/arrays. Thus we override
// the schema to allow all valid JSON values.
{},
},
}
}
switch s.Type {
case jsonschema.ArrayType, jsonschema.ObjectType:
// arrays and objects can have complex variable values specified.
return jsonschema.Schema{
AnyOf: []jsonschema.Schema{
s,
{
Type: jsonschema.StringType,
Pattern: interpolationPattern("var"),
}},
}
case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType:
// primitives can have variable values, or references like ${bundle.xyz}
// or ${workspace.xyz}
return jsonschema.Schema{
AnyOf: []jsonschema.Schema{
s,
{Type: jsonschema.StringType, Pattern: interpolationPattern("resources")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("bundle")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("workspace")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("artifacts")},
{Type: jsonschema.StringType, Pattern: interpolationPattern("var")},
},
}
default:
return s
}
}
func main() {
if len(os.Args) != 2 {
fmt.Println("Usage: go run main.go <output-file>")
os.Exit(1)
}
// Output file, where the generated JSON schema will be written to.
outputFile := os.Args[1]
// Input file, the databricks openapi spec.
inputFile := os.Getenv("DATABRICKS_OPENAPI_SPEC")
if inputFile == "" {
log.Fatal("DATABRICKS_OPENAPI_SPEC environment variable not set")
}
p, err := newParser(inputFile)
if err != nil {
log.Fatal(err)
}
// Generate the JSON schema from the bundle Go struct.
s, err := jsonschema.FromType(reflect.TypeOf(config.Root{}), []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
p.addDescriptions,
p.addEnums,
addInterpolationPatterns,
})
if err != nil {
log.Fatal(err)
}
b, err := json.MarshalIndent(s, "", " ")
if err != nil {
log.Fatal(err)
}
// Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, b, 0644)
if err != nil {
log.Fatal(err)
}
}
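
A small standalone sketch (not part of the change) of what the pattern produced by interpolationPattern("var") accepts; the sample strings are made up.

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Same shape as interpolationPattern("var") above.
    pattern := regexp.MustCompile(`\$\{(var(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)+)\}`)

    fmt.Println(pattern.MatchString("${var.simplevar}"))          // true
    fmt.Println(pattern.MatchString("${var.complexvar.key3[0]}")) // true
    fmt.Println(pattern.MatchString("${workspace.root_path}"))    // false: different prefix
    fmt.Println(pattern.MatchString("plain string"))              // false
}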

@@ -0,0 +1,123 @@
package main
import (
"encoding/json"
"fmt"
"os"
"path"
"reflect"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
type Components struct {
Schemas map[string]jsonschema.Schema `json:"schemas,omitempty"`
}
type Specification struct {
Components Components `json:"components"`
}
type openapiParser struct {
ref map[string]jsonschema.Schema
}
func newParser(path string) (*openapiParser, error) {
b, err := os.ReadFile(path)
if err != nil {
return nil, err
}
spec := Specification{}
err = json.Unmarshal(b, &spec)
if err != nil {
return nil, err
}
p := &openapiParser{}
p.ref = spec.Components.Schemas
return p, nil
}
// This function checks if the input type:
// 1. Is a Databricks Go SDK type.
// 2. Has a Databricks Go SDK type embedded in it.
//
// If the above conditions are met, the function returns the JSON schema
// corresponding to the Databricks Go SDK type from the OpenAPI spec.
func (p *openapiParser) findRef(typ reflect.Type) (jsonschema.Schema, bool) {
typs := []reflect.Type{typ}
// Check for embedded Databricks Go SDK types.
if typ.Kind() == reflect.Struct {
for i := 0; i < typ.NumField(); i++ {
if !typ.Field(i).Anonymous {
continue
}
// Dereference current type if it's a pointer.
ctyp := typ.Field(i).Type
for ctyp.Kind() == reflect.Ptr {
ctyp = ctyp.Elem()
}
typs = append(typs, ctyp)
}
}
for _, ctyp := range typs {
// Skip if it's not a Go SDK type.
if !strings.HasPrefix(ctyp.PkgPath(), "github.com/databricks/databricks-sdk-go") {
continue
}
pkgName := path.Base(ctyp.PkgPath())
k := fmt.Sprintf("%s.%s", pkgName, ctyp.Name())
// Skip if the type is not in the openapi spec.
_, ok := p.ref[k]
if !ok {
continue
}
// Return the first Go SDK type found in the openapi spec.
return p.ref[k], true
}
return jsonschema.Schema{}, false
}
// Use the OpenAPI spec to load descriptions for the given type.
func (p *openapiParser) addDescriptions(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
s.Description = ref.Description
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Description = refProp.Description
}
}
return s
}
// Use the OpenAPI spec to add enum values for the given type.
func (p *openapiParser) addEnums(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
ref, ok := p.findRef(typ)
if !ok {
return s
}
s.Enum = append(s.Enum, ref.Enum...)
for k, v := range s.Properties {
if refProp, ok := ref.Properties[k]; ok {
v.Enum = append(v.Enum, refProp.Enum...)
}
}
return s
}
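
A hypothetical illustration (not part of the change) of the embedded-SDK-type case findRef handles; the wrapper struct is made up, and the lookup key mirrors how findRef derives it from the SDK package path and type name.

// A bundle-style wrapper that embeds a Databricks Go SDK type.
type exampleResource struct {
    *compute.ClusterSpec

    LocalOnly string `json:"local_only,omitempty"`
}

// findRef skips exampleResource itself (not an SDK type), dereferences the
// embedded pointer, and builds the key "compute.ClusterSpec" from
// path.Base(PkgPath()) and the type name before looking it up in p.ref.
func exampleLookup(p *openapiParser) (jsonschema.Schema, bool) {
    return p.findRef(reflect.TypeOf(exampleResource{}))
}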

@@ -0,0 +1,3 @@
bundle:
  # expected type is 'string'
  name: 1234

@@ -0,0 +1,4 @@
resources:
  jobs:
    myjob:
      format: INVALID_VALUE

@@ -0,0 +1,6 @@
resources:
  models:
    mymodel:
      latest_versions:
        - creation_timestamp: 123
          status: INVALID_VALUE

@@ -0,0 +1,8 @@
resources:
  jobs:
    outer:
      name: outer job
      tasks:
        - task_key: run job task 1
          run_job_task:
            job_id: ${invalid.reference}

@@ -0,0 +1,5 @@
resources:
  models:
    mymodel:
      latest_versions:
        - creation_timestamp: ${invalid.reference}

@@ -0,0 +1,9 @@
resources:
  jobs:
    foo:
      name: my job
      tasks:
        # All tasks need to have a task_key.
        - notebook_task:
            notebook_path: /Users/abc/notebooks/inner
          existing_cluster_id: abcd

@@ -0,0 +1,5 @@
resources:
  jobs:
    myjob:
      # unknown fields should cause schema failure.
      unknown_field: "value"

@@ -0,0 +1,6 @@
resources:
  models:
    mymodel:
      creation_timestamp: 123
      description: "my model"
      unknown: "value"

@@ -0,0 +1 @@
unknown: value

@@ -0,0 +1,11 @@
artifacts:
  abc:
    path: /Workspace/a/b/c
    type: wheel
    files:
      - source: ./x.whl

resources:
  jobs:
    foo:
      name: ${artifacts.abc.type}

@@ -0,0 +1,2 @@
bundle:
  name: basic

@@ -0,0 +1,4 @@
targets:
  development:
    variables:
      myvar: value

@@ -0,0 +1,63 @@
bundle:
  name: a job

workspace:
  host: "https://myworkspace.com"
  root_path: /abc

presets:
  name_prefix: "[DEV]"
  jobs_max_concurrent_runs: 10

variables:
  simplevar:
    default: true
    description: "simplevar description"

  complexvar:
    default:
      key1: value1
      key2: value2
      key3:
        - value3
        - value4
    description: "complexvar description"

run_as:
  service_principal_name: myserviceprincipal

resources:
  jobs:
    myjob:
      name: myjob
      continuous:
        pause_status: PAUSED
      edit_mode: EDITABLE
      max_concurrent_runs: 10
      description: "my job description"
      email_notifications:
        no_alert_for_skipped_runs: true
      environments:
        - environment_key: venv
          spec:
            dependencies:
              - python=3.7
            client: "myclient"
      format: MULTI_TASK
      tags:
        foo: bar
        bar: baz
      tasks:
        - task_key: mytask
          notebook_task:
            notebook_path: ${var.simplevar}
          existing_cluster_id: abcd
        - task_key: mytask2
          for_each_task:
            inputs: av
            concurrency: 10
            task:
              task_key: inside_for_each
              notebook_task:
                notebook_path: ${var.complexvar.key3[0]}
        - ${var.complexvar}

@@ -0,0 +1,72 @@
bundle:
  name: ML

workspace:
  host: "https://myworkspace.com"
  root_path: /abc

presets:
  name_prefix: "[DEV]"
  jobs_max_concurrent_runs: 10

variables:
  simplevar:
    default: "true"
    description: "simplevar description"

  complexvar:
    default:
      key1: value1
      key2: value2
      key3:
        - value3
        - value4
    description: "complexvar description"

resources:
  models:
    mymodel:
      creation_timestamp: 123
      description: "my model"
      latest_versions:
        - creation_timestamp: 123
          tags: ${var.complexvar.key1}
          status: READY
      permissions:
        - service_principal_name: myserviceprincipal
          level: CAN_MANAGE

  experiments:
    myexperiment:
      artifact_location: /dbfs/myexperiment
      last_update_time: ${var.complexvar.key2}
      lifecycle_stage: ${var.simplevar}
      permissions:
        - service_principal_name: myserviceprincipal
          level: CAN_MANAGE

  model_serving_endpoints:
    myendpoint:
      config:
        served_models:
          - model_name: ${resources.models.mymodel.name}
            model_version: abc
            scale_to_zero_enabled: true
            workload_size: Large
      name: myendpoint

  schemas:
    myschema:
      catalog_name: mycatalog
      name: myschema

  registered_models:
    myregisteredmodel:
      catalog_name: mycatalog
      name: myregisteredmodel
      schema_name: ${resources.schemas.myschema.name}
      grants:
        - principal: abcd
          privileges:
            - SELECT
            - INSERT

@@ -0,0 +1,54 @@
bundle:
  name: a pipeline

workspace:
  host: "https://myworkspace.com"
  root_path: /abc

presets:
  name_prefix: "[DEV]"
  jobs_max_concurrent_runs: 10

variables:
  simplevar:
    default: true
    description: "simplevar description"

  complexvar:
    default:
      key1: value1
      key2: value2
      key3:
        - value3
        - value4
    description: "complexvar description"

artifacts:
  mywheel:
    path: ./mywheel.whl
    type: WHEEL

run_as:
  service_principal_name: myserviceprincipal

resources:
  jobs:
    myjob:
      name: myjob
      tasks:
        - task_key: ${bundle.name} pipeline trigger
          pipeline_task:
            pipeline_id: ${resources.mypipeline.id}

  pipelines:
    mypipeline:
      name: mypipeline
      libraries:
        - whl: ./mywheel.whl
      catalog: 3{var.complexvar.key2}
      development: true
      clusters:
        - autoscale:
            mode: ENHANCED
            max_workers: 10
            min_workers: 1

@@ -0,0 +1,16 @@
bundle:
  name: quality_monitor

resources:
  quality_monitors:
    myqualitymonitor:
      inference_log:
        granularities:
          - a
          - b
        model_id_col: a
        prediction_col: b
        timestamp_col: c
        problem_type: PROBLEM_TYPE_CLASSIFICATION
      assets_dir: /dbfs/mnt/abc
      output_schema_name: default

@@ -0,0 +1,56 @@
bundle:
  name: a run job task
  databricks_cli_version: 0.200.0
  compute_id: "mycompute"

variables:
  simplevar:
    default: 5678
    description: "simplevar description"

  complexvar:
    default:
      key1: 1234
      key2: value2
      key3:
        - value3
        - 9999
    description: "complexvar description"

resources:
  jobs:
    inner:
      permissions:
        - user_name: user1
          level: CAN_MANAGE

      name: inner job
      tasks:
        - task_key: inner notebook task
          notebook_task:
            notebook_path: /Users/abc/notebooks/inner
          existing_cluster_id: abcd

    outer:
      name: outer job
      tasks:
        - task_key: run job task 1
          run_job_task:
            job_id: 1234

        - task_key: run job task 2
          run_job_task:
            job_id: ${var.complexvar.key1}

        - task_key: run job task 3
          run_job_task:
            job_id: ${var.simplevar}

        - task_key: run job task 4
          run_job_task:
            job_id: ${resources.inner.id}

        - task_key: run job task 5
          run_job_task:
            job_id: ${var.complexvar.key3[1]}

@@ -0,0 +1,24 @@
bundle:
  name: basic

variables:
  complexvar:
    default:
      key1: 1234
      key2: value2
      key3:
        - value3
        - 9999
    description: complexvar description

resources:
  schemas:
    myschema:
      name: myschema
      catalog_name: main
      grants:
        - ${var.complexvar}
        - principal: ${workspace.current_user.me}
          privileges:
            - ${var.complexvar.key3[0]}
            - ${var.complexvar.key2}

@@ -16,12 +16,10 @@ type expand struct {

func matchError(p dyn.Path, l []dyn.Location, message string) diag.Diagnostic {
    return diag.Diagnostic{
        Severity: diag.Error,
        Summary:  message,
-        Paths: []dyn.Path{
-            p.Append(),
-        },
        Locations: l,
+        Paths:     []dyn.Path{p},
    }
}

@@ -41,7 +39,7 @@ func getLibDetails(v dyn.Value) (string, string, bool) {
}

func findMatches(b *bundle.Bundle, path string) ([]string, error) {
-    matches, err := filepath.Glob(filepath.Join(b.RootPath, path))
+    matches, err := filepath.Glob(filepath.Join(b.SyncRootPath, path))
    if err != nil {
        return nil, err
    }

@@ -54,10 +52,10 @@ func findMatches(b *bundle.Bundle, path string) ([]string, error) {
        }
    }

-    // We make the matched path relative to the root path before storing it
+    // We make the matched path relative to the sync root path before storing it
    // to allow upload mutator to distinguish between local and remote paths
    for i, match := range matches {
-        matches[i], err = filepath.Rel(b.RootPath, match)
+        matches[i], err = filepath.Rel(b.SyncRootPath, match)
        if err != nil {
            return nil, err
        }

@@ -213,8 +211,8 @@ func (e *expand) Name() string {

// ExpandGlobReferences expands any glob references in the libraries or environments section
// to corresponding local paths.
-// We only expand local paths (i.e. paths that are relative to the root path).
-// After expanding we make the paths relative to the root path to allow upload mutator later in the chain to
+// We only expand local paths (i.e. paths that are relative to the sync root path).
+// After expanding we make the paths relative to the sync root path to allow upload mutator later in the chain to
// distinguish between local and remote paths.
func ExpandGlobReferences() bundle.Mutator {
    return &expand{}

@@ -23,7 +23,7 @@ func TestGlobReferencesExpandedForTaskLibraries(t *testing.T) {
    testutil.Touch(t, dir, "jar", "my2.jar")

    b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -104,7 +104,7 @@ func TestGlobReferencesExpandedForForeachTaskLibraries(t *testing.T) {
    testutil.Touch(t, dir, "jar", "my2.jar")

    b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -189,7 +189,7 @@ func TestGlobReferencesExpandedForEnvironmentsDeps(t *testing.T) {
    testutil.Touch(t, dir, "jar", "my2.jar")

    b := &bundle.Bundle{
-        RootPath: dir,
+        SyncRootPath: dir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -18,7 +18,7 @@ func TestValidateEnvironments(t *testing.T) {
    testutil.Touch(t, tmpDir, "wheel.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -50,7 +50,7 @@ func TestValidateEnvironmentsNoFile(t *testing.T) {
    tmpDir := t.TempDir()

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -84,7 +84,7 @@ func TestValidateTaskLibraries(t *testing.T) {
    testutil.Touch(t, tmpDir, "wheel.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -117,7 +117,7 @@ func TestValidateTaskLibrariesNoFile(t *testing.T) {
    tmpDir := t.TempDir()

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Resources: config.Resources{
                Jobs: map[string]*resources.Job{

@@ -74,9 +74,9 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error
            return v, nil
        }

-        source = filepath.Join(b.RootPath, source)
+        source = filepath.Join(b.SyncRootPath, source)
        libs[source] = append(libs[source], configLocation{
-            configPath: p.Append(), // Hack to get the copy of path
+            configPath: p,
            location:   v.Location(),
        })

@@ -24,7 +24,7 @@ func TestArtifactUploadForWorkspace(t *testing.T) {
    whlLocalPath := filepath.Join(whlFolder, "source.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Workspace: config.Workspace{
                ArtifactPath: "/foo/bar/artifacts",

@@ -112,7 +112,7 @@ func TestArtifactUploadForVolumes(t *testing.T) {
    whlLocalPath := filepath.Join(whlFolder, "source.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Workspace: config.Workspace{
                ArtifactPath: "/Volumes/foo/bar/artifacts",

@@ -200,7 +200,7 @@ func TestArtifactUploadWithNoLibraryReference(t *testing.T) {
    whlLocalPath := filepath.Join(whlFolder, "source.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Workspace: config.Workspace{
                ArtifactPath: "/Workspace/foo/bar/artifacts",

@@ -240,7 +240,7 @@ func TestUploadMultipleLibraries(t *testing.T) {
    testutil.Touch(t, whlFolder, "source4.whl")

    b := &bundle.Bundle{
-        RootPath: tmpDir,
+        SyncRootPath: tmpDir,
        Config: config.Root{
            Workspace: config.Workspace{
                ArtifactPath: "/foo/bar/artifacts",

View File

@ -19,9 +19,38 @@ import (
"github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/scripts"
"github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/cmdio"
terraformlib "github.com/databricks/cli/libs/terraform" terraformlib "github.com/databricks/cli/libs/terraform"
tfjson "github.com/hashicorp/terraform-json"
) )
func approvalForUcSchemaDelete(ctx context.Context, b *bundle.Bundle) (bool, error) { func parseTerraformActions(changes []*tfjson.ResourceChange, toInclude func(typ string, actions tfjson.Actions) bool) []terraformlib.Action {
res := make([]terraformlib.Action, 0)
for _, rc := range changes {
if !toInclude(rc.Type, rc.Change.Actions) {
continue
}
var actionType terraformlib.ActionType
switch {
case rc.Change.Actions.Delete():
actionType = terraformlib.ActionTypeDelete
case rc.Change.Actions.Replace():
actionType = terraformlib.ActionTypeRecreate
default:
// No use case for other action types yet.
continue
}
res = append(res, terraformlib.Action{
Action: actionType,
ResourceType: rc.Type,
ResourceName: rc.Name,
})
}
return res
}
func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) {
tf := b.Terraform tf := b.Terraform
if tf == nil { if tf == nil {
return false, fmt.Errorf("terraform not initialized") return false, fmt.Errorf("terraform not initialized")
@ -33,41 +62,52 @@ func approvalForUcSchemaDelete(ctx context.Context, b *bundle.Bundle) (bool, err
return false, err return false, err
} }
actions := make([]terraformlib.Action, 0) schemaActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
for _, rc := range plan.ResourceChanges { // Filter in only UC schema resources.
// We only care about destructive actions on UC schema resources. if typ != "databricks_schema" {
if rc.Type != "databricks_schema" { return false
continue
} }
var actionType terraformlib.ActionType // We only display prompts for destructive actions like deleting or
// recreating a schema.
return actions.Delete() || actions.Replace()
})
switch { dltActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
case rc.Change.Actions.Delete(): // Filter in only DLT pipeline resources.
actionType = terraformlib.ActionTypeDelete if typ != "databricks_pipeline" {
case rc.Change.Actions.Replace(): return false
actionType = terraformlib.ActionTypeRecreate
default:
// We don't need a prompt for non-destructive actions like creating
// or updating a schema.
continue
} }
actions = append(actions, terraformlib.Action{ // Recreating DLT pipeline leads to metadata loss and for a transient period
Action: actionType, // the underling tables will be unavailable.
ResourceType: rc.Type, return actions.Replace() || actions.Delete()
ResourceName: rc.Name, })
})
}
// No restricted actions planned. No need for approval. // We don't need to display any prompts in this case.
if len(actions) == 0 { if len(dltActions) == 0 && len(schemaActions) == 0 {
return true, nil return true, nil
} }
cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data may be lost:") // One or more UC schema resources will be deleted or recreated.
for _, action := range actions { if len(schemaActions) != 0 {
cmdio.Log(ctx, action) cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data may be lost:")
for _, action := range schemaActions {
cmdio.Log(ctx, action)
}
}
// One or more DLT pipelines are being deleted or recreated.
if len(dltActions) != 0 {
msg := `
This action will result in the deletion or recreation of the following DLT Pipelines along with the
Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
restore the defined STs and MVs through a full refresh. Note that recreation is necessary when pipeline
properties such as the 'catalog' or 'storage' are changed:`
cmdio.LogString(ctx, msg)
for _, action := range dltActions {
cmdio.Log(ctx, action)
}
} }
if b.AutoApprove { if b.AutoApprove {
@ -126,7 +166,7 @@ func Deploy() bundle.Mutator {
terraform.CheckRunningResource(), terraform.CheckRunningResource(),
terraform.Plan(terraform.PlanGoal("deploy")), terraform.Plan(terraform.PlanGoal("deploy")),
bundle.If( bundle.If(
approvalForUcSchemaDelete, approvalForDeploy,
deployCore, deployCore,
bundle.LogString("Deployment cancelled!"), bundle.LogString("Deployment cancelled!"),
), ),

View File

@ -0,0 +1,67 @@
package phases
import (
"testing"
terraformlib "github.com/databricks/cli/libs/terraform"
tfjson "github.com/hashicorp/terraform-json"
"github.com/stretchr/testify/assert"
)
func TestParseTerraformActions(t *testing.T) {
changes := []*tfjson.ResourceChange{
{
Type: "databricks_pipeline",
Change: &tfjson.Change{
Actions: tfjson.Actions{tfjson.ActionCreate},
},
Name: "create pipeline",
},
{
Type: "databricks_pipeline",
Change: &tfjson.Change{
Actions: tfjson.Actions{tfjson.ActionDelete},
},
Name: "delete pipeline",
},
{
Type: "databricks_pipeline",
Change: &tfjson.Change{
Actions: tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate},
},
Name: "recreate pipeline",
},
{
Type: "databricks_whatever",
Change: &tfjson.Change{
Actions: tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate},
},
Name: "recreate whatever",
},
}
res := parseTerraformActions(changes, func(typ string, actions tfjson.Actions) bool {
if typ != "databricks_pipeline" {
return false
}
if actions.Delete() || actions.Replace() {
return true
}
return false
})
assert.Equal(t, []terraformlib.Action{
{
Action: terraformlib.ActionTypeDelete,
ResourceType: "databricks_pipeline",
ResourceName: "delete pipeline",
},
{
Action: terraformlib.ActionTypeRecreate,
ResourceType: "databricks_pipeline",
ResourceName: "recreate pipeline",
},
}, res)
}

View File

@ -2,6 +2,7 @@ package python
import ( import (
"context" "context"
"strconv"
"strings" "strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
@ -38,7 +39,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
tasks := libraries.FindTasksWithLocalLibraries(b) tasks := libraries.FindTasksWithLocalLibraries(b)
for _, task := range tasks { for _, task := range tasks {
if task.NewCluster != nil { if task.NewCluster != nil {
if lowerThanExpectedVersion(ctx, task.NewCluster.SparkVersion) { if lowerThanExpectedVersion(task.NewCluster.SparkVersion) {
return true return true
} }
} }
@ -47,7 +48,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
for _, job := range b.Config.Resources.Jobs { for _, job := range b.Config.Resources.Jobs {
for _, cluster := range job.JobClusters { for _, cluster := range job.JobClusters {
if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" { if task.JobClusterKey == cluster.JobClusterKey && cluster.NewCluster.SparkVersion != "" {
if lowerThanExpectedVersion(ctx, cluster.NewCluster.SparkVersion) { if lowerThanExpectedVersion(cluster.NewCluster.SparkVersion) {
return true return true
} }
} }
@ -64,7 +65,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false return false
} }
if lowerThanExpectedVersion(ctx, version) { if lowerThanExpectedVersion(version) {
return true return true
} }
} }
@ -73,7 +74,7 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
return false return false
} }
func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool { func lowerThanExpectedVersion(sparkVersion string) bool {
parts := strings.Split(sparkVersion, ".") parts := strings.Split(sparkVersion, ".")
if len(parts) < 2 { if len(parts) < 2 {
return false return false
@ -82,6 +83,17 @@ func lowerThanExpectedVersion(ctx context.Context, sparkVersion string) bool {
if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99) if parts[1][0] == 'x' { // treat versions like 13.x as the very latest minor (13.99)
parts[1] = "99" parts[1] = "99"
} }
// if any of the version parts are not numbers, we can't compare
// so treat it as a compatible version
if _, err := strconv.Atoi(parts[0]); err != nil {
return false
}
if _, err := strconv.Atoi(parts[1]); err != nil {
return false
}
v := "v" + parts[0] + "." + parts[1] v := "v" + parts[0] + "." + parts[1]
return semver.Compare(v, "v13.1") < 0 return semver.Compare(v, "v13.1") < 0
} }

View File

@ -344,6 +344,8 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
"14.1.x-scala2.12": false, "14.1.x-scala2.12": false,
"13.x-snapshot-scala-2.12": false, "13.x-snapshot-scala-2.12": false,
"13.x-rc-scala-2.12": false, "13.x-rc-scala-2.12": false,
"client.1.10-scala2.12": false,
"latest-stable-gpu-scala2.11": false,
"10.4.x-aarch64-photon-scala2.12": true, "10.4.x-aarch64-photon-scala2.12": true,
"10.4.x-scala2.12": true, "10.4.x-scala2.12": true,
"13.0.x-scala2.12": true, "13.0.x-scala2.12": true,
@ -351,7 +353,7 @@ func TestSparkVersionLowerThanExpected(t *testing.T) {
} }
for k, v := range testCases { for k, v := range testCases {
result := lowerThanExpectedVersion(context.Background(), k) result := lowerThanExpectedVersion(k)
require.Equal(t, v, result, k) require.Equal(t, v, result, k)
} }
} }

View File

@ -1,18 +0,0 @@
### Overview
`docs/bundle_descriptions.json` contains both autogenerated and manually written
descriptions for the json schema. Specifically
1. `resources` : almost all descriptions are autogenerated from the OpenAPI spec
2. `targets` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`)
3. `bundle` : manually edited
4. `include` : manually edited
5. `workspace` : manually edited
6. `artifacts` : manually edited
These descriptions are rendered in the inline documentation in an IDE
### SOP: Add schema descriptions for new fields in bundle config
Manually edit bundle_descriptions.json to add your descriptions. Note that the
descriptions in the `resources` block are generated from the OpenAPI spec, and thus
any changes there will be overwritten.

View File

@ -1,109 +0,0 @@
package schema
import (
_ "embed"
"encoding/json"
"fmt"
"os"
"reflect"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/jsonschema"
)
// A subset of Schema struct
type Docs struct {
Description string `json:"description"`
Properties map[string]*Docs `json:"properties,omitempty"`
Items *Docs `json:"items,omitempty"`
AdditionalProperties *Docs `json:"additionalproperties,omitempty"`
}
//go:embed docs/bundle_descriptions.json
var bundleDocs []byte
func (docs *Docs) refreshTargetsDocs() error {
targetsDocs, ok := docs.Properties["targets"]
if !ok || targetsDocs.AdditionalProperties == nil ||
targetsDocs.AdditionalProperties.Properties == nil {
return fmt.Errorf("invalid targets descriptions")
}
targetProperties := targetsDocs.AdditionalProperties.Properties
propertiesToCopy := []string{"artifacts", "bundle", "resources", "workspace"}
for _, p := range propertiesToCopy {
targetProperties[p] = docs.Properties[p]
}
return nil
}
func LoadBundleDescriptions() (*Docs, error) {
embedded := Docs{}
err := json.Unmarshal(bundleDocs, &embedded)
return &embedded, err
}
func UpdateBundleDescriptions(openapiSpecPath string) (*Docs, error) {
embedded, err := LoadBundleDescriptions()
if err != nil {
return nil, err
}
// Generate schema from the embedded descriptions, and convert it back to docs.
// This creates empty descriptions for any properties that were missing in the
// embedded descriptions.
schema, err := New(reflect.TypeOf(config.Root{}), embedded)
if err != nil {
return nil, err
}
docs := schemaToDocs(schema)
// Load the Databricks OpenAPI spec
openapiSpec, err := os.ReadFile(openapiSpecPath)
if err != nil {
return nil, err
}
spec := &Specification{}
err = json.Unmarshal(openapiSpec, spec)
if err != nil {
return nil, err
}
openapiReader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
// Generate descriptions for the "resources" field
resourcesDocs, err := openapiReader.ResourcesDocs()
if err != nil {
return nil, err
}
resourceSchema, err := New(reflect.TypeOf(config.Resources{}), resourcesDocs)
if err != nil {
return nil, err
}
docs.Properties["resources"] = schemaToDocs(resourceSchema)
docs.refreshTargetsDocs()
return docs, nil
}
// *Docs are a subset of *Schema, this function selects that subset
func schemaToDocs(jsonSchema *jsonschema.Schema) *Docs {
// terminate recursion if schema is nil
if jsonSchema == nil {
return nil
}
docs := &Docs{
Description: jsonSchema.Description,
}
if len(jsonSchema.Properties) > 0 {
docs.Properties = make(map[string]*Docs)
}
for k, v := range jsonSchema.Properties {
docs.Properties[k] = schemaToDocs(v)
}
docs.Items = schemaToDocs(jsonSchema.Items)
if additionalProperties, ok := jsonSchema.AdditionalProperties.(*jsonschema.Schema); ok {
docs.AdditionalProperties = schemaToDocs(additionalProperties)
}
return docs
}

File diff suppressed because it is too large

View File

@ -1,62 +0,0 @@
package schema
import (
"encoding/json"
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestSchemaToDocs(t *testing.T) {
jsonSchema := &jsonschema.Schema{
Type: "object",
Description: "root doc",
Properties: map[string]*jsonschema.Schema{
"foo": {Type: "number", Description: "foo doc"},
"bar": {Type: "string"},
"octave": {
Type: "object",
AdditionalProperties: &jsonschema.Schema{Type: "number"},
Description: "octave docs",
},
"scales": {
Type: "object",
Description: "scale docs",
Items: &jsonschema.Schema{Type: "string"},
},
},
}
docs := schemaToDocs(jsonSchema)
docsJson, err := json.MarshalIndent(docs, " ", " ")
require.NoError(t, err)
expected :=
`{
"description": "root doc",
"properties": {
"bar": {
"description": ""
},
"foo": {
"description": "foo doc"
},
"octave": {
"description": "octave docs",
"additionalproperties": {
"description": ""
}
},
"scales": {
"description": "scale docs",
"items": {
"description": ""
}
}
}
}`
t.Log("[DEBUG] actual: ", string(docsJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(docsJson))
}

bundle/schema/embed.go Normal file
View File

@ -0,0 +1,6 @@
package schema
import _ "embed"
//go:embed jsonschema.json
var Bytes []byte

View File

@ -0,0 +1,71 @@
package schema_test
import (
"encoding/json"
"testing"
"github.com/databricks/cli/bundle/schema"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func walk(defs map[string]any, p ...string) jsonschema.Schema {
v, ok := defs[p[0]]
if !ok {
panic("not found: " + p[0])
}
if len(p) == 1 {
b, err := json.Marshal(v)
if err != nil {
panic(err)
}
res := jsonschema.Schema{}
err = json.Unmarshal(b, &res)
if err != nil {
panic(err)
}
return res
}
return walk(v.(map[string]any), p[1:]...)
}
func TestJsonSchema(t *testing.T) {
s := jsonschema.Schema{}
err := json.Unmarshal(schema.Bytes, &s)
require.NoError(t, err)
// Assert job fields have their descriptions loaded.
resourceJob := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Job")
fields := []string{"name", "continuous", "deployment", "tasks", "trigger"}
for _, field := range fields {
assert.NotEmpty(t, resourceJob.AnyOf[0].Properties[field].Description)
}
// Assert descriptions were also loaded for a job task definition.
jobTask := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.Task")
fields = []string{"notebook_task", "spark_jar_task", "spark_python_task", "spark_submit_task", "description", "depends_on", "environment_key", "for_each_task", "existing_cluster_id"}
for _, field := range fields {
assert.NotEmpty(t, jobTask.AnyOf[0].Properties[field].Description)
}
// Assert descriptions are loaded for pipelines
pipeline := walk(s.Definitions, "github.com", "databricks", "cli", "bundle", "config", "resources.Pipeline")
fields = []string{"name", "catalog", "clusters", "channel", "continuous", "deployment", "development"}
for _, field := range fields {
assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description)
}
// Assert enum values are loaded
schedule := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "catalog.MonitorCronSchedule")
assert.Contains(t, schedule.AnyOf[0].Properties["pause_status"].Enum, "PAUSED")
assert.Contains(t, schedule.AnyOf[0].Properties["pause_status"].Enum, "UNPAUSED")
providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider")
assert.Contains(t, providers.Enum, "gitHub")
assert.Contains(t, providers.Enum, "bitbucketCloud")
assert.Contains(t, providers.Enum, "gitHubEnterprise")
assert.Contains(t, providers.Enum, "bitbucketServer")
}

bundle/schema/jsonschema.json generated Normal file

File diff suppressed because it is too large

View File

@ -1,293 +0,0 @@
package schema
import (
"encoding/json"
"fmt"
"strings"
"github.com/databricks/cli/libs/jsonschema"
)
type OpenapiReader struct {
// OpenAPI spec to read schemas from.
OpenapiSpec *Specification
// In-memory cache of schemas read from the OpenAPI spec.
memo map[string]jsonschema.Schema
}
const SchemaPathPrefix = "#/components/schemas/"
// Read a schema directly from the OpenAPI spec.
func (reader *OpenapiReader) readOpenapiSchema(path string) (jsonschema.Schema, error) {
schemaKey := strings.TrimPrefix(path, SchemaPathPrefix)
// return early if we already have a computed schema
memoSchema, ok := reader.memo[schemaKey]
if ok {
return memoSchema, nil
}
// check path is present in openapi spec
openapiSchema, ok := reader.OpenapiSpec.Components.Schemas[schemaKey]
if !ok {
return jsonschema.Schema{}, fmt.Errorf("schema with path %s not found in openapi spec", path)
}
// convert openapi schema to the native schema struct
bytes, err := json.Marshal(*openapiSchema)
if err != nil {
return jsonschema.Schema{}, err
}
jsonSchema := jsonschema.Schema{}
err = json.Unmarshal(bytes, &jsonSchema)
if err != nil {
return jsonschema.Schema{}, err
}
// A hack to convert a map[string]interface{} to *Schema
// We rely on the type of the AdditionalProperties field in downstream functions
// to do reference interpolation
_, ok = jsonSchema.AdditionalProperties.(map[string]interface{})
if ok {
b, err := json.Marshal(jsonSchema.AdditionalProperties)
if err != nil {
return jsonschema.Schema{}, err
}
additionalProperties := &jsonschema.Schema{}
err = json.Unmarshal(b, additionalProperties)
if err != nil {
return jsonschema.Schema{}, err
}
jsonSchema.AdditionalProperties = additionalProperties
}
// store read schema into memo
reader.memo[schemaKey] = jsonSchema
return jsonSchema, nil
}
// Resolve all nested "$ref" references in the schema. This function unrolls a single
// level of "$ref" in the schema and calls into traverseSchema to resolve nested references.
// Thus this function and traverseSchema are mutually recursive.
//
// This function is safe against reference loops. If a reference loop is detected, an error
// is returned.
func (reader *OpenapiReader) safeResolveRefs(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) {
if root.Reference == nil {
return reader.traverseSchema(root, tracker)
}
key := *root.Reference
// HACK to unblock CLI release (13th Feb 2024). This is temporary until proper
// support for recursive types is added to the docs generator. PR: https://github.com/databricks/cli/pull/1204
if strings.Contains(key, "ForEachTask") {
return root, nil
}
if tracker.hasCycle(key) {
// self reference loops can be supported, however the logic is non-trivial because
// cross reference loops are not allowed (see: http://json-schema.org/understanding-json-schema/structuring.html#recursion)
return nil, fmt.Errorf("references loop detected")
}
ref := *root.Reference
description := root.Description
tracker.push(ref, ref)
// Mark reference nil, so we do not traverse this again. This is tracked
// in the memo
root.Reference = nil
// unroll one level of reference.
selfRef, err := reader.readOpenapiSchema(ref)
if err != nil {
return nil, err
}
root = &selfRef
root.Description = description
// traverse again to find new references
root, err = reader.traverseSchema(root, tracker)
if err != nil {
return nil, err
}
tracker.pop(ref)
return root, err
}
// Traverse the nested properties of the schema to resolve "$ref" references. This function
// and safeResolveRefs are mutually recursive.
func (reader *OpenapiReader) traverseSchema(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) {
// case primitive (or invalid)
if root.Type != jsonschema.ObjectType && root.Type != jsonschema.ArrayType {
return root, nil
}
// only root references are resolved
if root.Reference != nil {
return reader.safeResolveRefs(root, tracker)
}
// case struct
if len(root.Properties) > 0 {
for k, v := range root.Properties {
childSchema, err := reader.safeResolveRefs(v, tracker)
if err != nil {
return nil, err
}
root.Properties[k] = childSchema
}
}
// case array
if root.Items != nil {
itemsSchema, err := reader.safeResolveRefs(root.Items, tracker)
if err != nil {
return nil, err
}
root.Items = itemsSchema
}
// case map
additionalProperties, ok := root.AdditionalProperties.(*jsonschema.Schema)
if ok && additionalProperties != nil {
valueSchema, err := reader.safeResolveRefs(additionalProperties, tracker)
if err != nil {
return nil, err
}
root.AdditionalProperties = valueSchema
}
return root, nil
}
func (reader *OpenapiReader) readResolvedSchema(path string) (*jsonschema.Schema, error) {
root, err := reader.readOpenapiSchema(path)
if err != nil {
return nil, err
}
tracker := newTracker()
tracker.push(path, path)
resolvedRoot, err := reader.safeResolveRefs(&root, tracker)
if err != nil {
return nil, tracker.errWithTrace(err.Error(), "")
}
return resolvedRoot, nil
}
func (reader *OpenapiReader) jobsDocs() (*Docs, error) {
jobSettingsSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "jobs.JobSettings")
if err != nil {
return nil, err
}
jobDocs := schemaToDocs(jobSettingsSchema)
// TODO: add description for id if needed.
// Tracked in https://github.com/databricks/cli/issues/242
jobsDocs := &Docs{
Description: "List of Databricks jobs",
AdditionalProperties: jobDocs,
}
return jobsDocs, nil
}
func (reader *OpenapiReader) pipelinesDocs() (*Docs, error) {
pipelineSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "pipelines.PipelineSpec")
if err != nil {
return nil, err
}
pipelineDocs := schemaToDocs(pipelineSpecSchema)
// TODO: Two fields in resources.Pipeline have the json tag id. Clarify the
// semantics and then add a description if needed. (https://github.com/databricks/cli/issues/242)
pipelinesDocs := &Docs{
Description: "List of DLT pipelines",
AdditionalProperties: pipelineDocs,
}
return pipelinesDocs, nil
}
func (reader *OpenapiReader) experimentsDocs() (*Docs, error) {
experimentSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Experiment")
if err != nil {
return nil, err
}
experimentDocs := schemaToDocs(experimentSpecSchema)
experimentsDocs := &Docs{
Description: "List of MLflow experiments",
AdditionalProperties: experimentDocs,
}
return experimentsDocs, nil
}
func (reader *OpenapiReader) modelsDocs() (*Docs, error) {
modelSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "ml.Model")
if err != nil {
return nil, err
}
modelDocs := schemaToDocs(modelSpecSchema)
modelsDocs := &Docs{
Description: "List of MLflow models",
AdditionalProperties: modelDocs,
}
return modelsDocs, nil
}
func (reader *OpenapiReader) modelServingEndpointsDocs() (*Docs, error) {
modelServingEndpointsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "serving.CreateServingEndpoint")
if err != nil {
return nil, err
}
modelServingEndpointsDocs := schemaToDocs(modelServingEndpointsSpecSchema)
modelServingEndpointsAllDocs := &Docs{
Description: "List of Model Serving Endpoints",
AdditionalProperties: modelServingEndpointsDocs,
}
return modelServingEndpointsAllDocs, nil
}
func (reader *OpenapiReader) registeredModelDocs() (*Docs, error) {
registeredModelsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "catalog.CreateRegisteredModelRequest")
if err != nil {
return nil, err
}
registeredModelsDocs := schemaToDocs(registeredModelsSpecSchema)
registeredModelsAllDocs := &Docs{
Description: "List of Registered Models",
AdditionalProperties: registeredModelsDocs,
}
return registeredModelsAllDocs, nil
}
func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) {
jobsDocs, err := reader.jobsDocs()
if err != nil {
return nil, err
}
pipelinesDocs, err := reader.pipelinesDocs()
if err != nil {
return nil, err
}
experimentsDocs, err := reader.experimentsDocs()
if err != nil {
return nil, err
}
modelsDocs, err := reader.modelsDocs()
if err != nil {
return nil, err
}
modelServingEndpointsDocs, err := reader.modelServingEndpointsDocs()
if err != nil {
return nil, err
}
registeredModelsDocs, err := reader.registeredModelDocs()
if err != nil {
return nil, err
}
return &Docs{
Description: "Collection of Databricks resources to deploy.",
Properties: map[string]*Docs{
"jobs": jobsDocs,
"pipelines": pipelinesDocs,
"experiments": experimentsDocs,
"models": modelsDocs,
"model_serving_endpoints": modelServingEndpointsDocs,
"registered_models": registeredModelsDocs,
},
}, nil
}

View File

@ -1,493 +0,0 @@
package schema
import (
"encoding/json"
"testing"
"github.com/databricks/cli/libs/jsonschema"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestReadSchemaForObject(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mango": {
"type": "object",
"description": "a mango for my schema",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"properties": {
"foo": {
"$ref": "#/components/schemas/foo"
}
}
}
}
}
}
`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mango": {
"type": "object",
"description": "a mango for my schema",
"properties": {
"foo": {
"type": "number"
}
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReadSchemaForArray(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"fruits": {
"type": "object",
"description": "fruits that are cool",
"items": {
"description": "some papayas, because papayas are fruits too",
"$ref": "#/components/schemas/papaya"
}
},
"papaya": {
"type": "number"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"items": {
"type": "number",
"description": "some papayas, because papayas are fruits too"
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReadSchemaForMap(t *testing.T) {
specString := `{
"components": {
"schemas": {
"fruits": {
"type": "object",
"description": "fruits that are meh",
"additionalProperties": {
"description": "watermelons. watermelons.",
"$ref": "#/components/schemas/watermelon"
}
},
"watermelon": {
"type": "number"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are meh",
"additionalProperties": {
"type": "number",
"description": "watermelons. watermelons."
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestRootReferenceIsResolved(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"abc": {
"type": "string"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
schema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(schema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "foo fighters fighting fruits",
"properties": {
"abc": {
"type": "string"
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestSelfReferenceLoopErrors(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"bar": {
"type": "object",
"$ref": "#/components/schemas/foo"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
_, err = reader.readResolvedSchema("#/components/schemas/fruits")
assert.ErrorContains(t, err, "references loop detected. traversal trace: -> #/components/schemas/fruits -> #/components/schemas/foo")
}
func TestCrossReferenceLoopErrors(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "object",
"description": "this description is ignored",
"properties": {
"bar": {
"type": "object",
"$ref": "#/components/schemas/fruits"
}
}
},
"fruits": {
"type": "object",
"description": "foo fighters fighting fruits",
"$ref": "#/components/schemas/foo"
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
_, err = reader.readResolvedSchema("#/components/schemas/fruits")
assert.ErrorContains(t, err, "references loop detected. traversal trace: -> #/components/schemas/fruits -> #/components/schemas/foo")
}
func TestReferenceResolutionForMapInObject(t *testing.T) {
specString := `
{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"additionalProperties": {
"description": "a single mango",
"$ref": "#/components/schemas/foo"
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"additionalProperties": {
"type": "number",
"description": "a single mango"
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReferenceResolutionForArrayInObject(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"$ref": "#/components/schemas/mango"
}
}
},
"mango": {
"type": "object",
"items": {
"description": "a single mango",
"$ref": "#/components/schemas/foo"
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"description": "fruits that are cool",
"properties": {
"guava": {
"type": "string",
"description": "a guava for my schema"
},
"mangos": {
"type": "object",
"description": "multiple mangos",
"items": {
"type": "number",
"description": "a single mango"
}
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}
func TestReferenceResolutionDoesNotOverwriteDescriptions(t *testing.T) {
specString := `{
"components": {
"schemas": {
"foo": {
"type": "number"
},
"fruits": {
"type": "object",
"properties": {
"guava": {
"type": "object",
"description": "Guava is a fruit",
"$ref": "#/components/schemas/foo"
},
"mango": {
"type": "object",
"description": "What is a mango?",
"$ref": "#/components/schemas/foo"
}
}
}
}
}
}`
spec := &Specification{}
reader := &OpenapiReader{
OpenapiSpec: spec,
memo: make(map[string]jsonschema.Schema),
}
err := json.Unmarshal([]byte(specString), spec)
require.NoError(t, err)
fruitsSchema, err := reader.readResolvedSchema("#/components/schemas/fruits")
require.NoError(t, err)
fruitsSchemaJson, err := json.MarshalIndent(fruitsSchema, " ", " ")
require.NoError(t, err)
expected := `{
"type": "object",
"properties": {
"guava": {
"type": "number",
"description": "Guava is a fruit"
},
"mango": {
"type": "number",
"description": "What is a mango?"
}
}
}`
t.Log("[DEBUG] actual: ", string(fruitsSchemaJson))
t.Log("[DEBUG] expected: ", expected)
assert.Equal(t, expected, string(fruitsSchemaJson))
}

View File

@ -1,287 +0,0 @@
package schema
import (
"container/list"
"fmt"
"reflect"
"strings"
"github.com/databricks/cli/libs/dyn/dynvar"
"github.com/databricks/cli/libs/jsonschema"
)
// Fields tagged "readonly" should not be emitted in the schema as they are
// computed at runtime, and should not be assigned a value by the bundle author.
const readonlyTag = "readonly"
// Annotation for internal bundle fields that should not be exposed to customers.
// Fields can be tagged as "internal" to remove them from the generated schema.
const internalTag = "internal"
// Annotation for bundle fields that have been deprecated.
// Fields tagged as "deprecated" are removed/omitted from the generated schema.
const deprecatedTag = "deprecated"
// This function translates golang types into json schema. Here is the mapping
// between json schema types and golang types
//
// - GolangType -> Javascript type / Json Schema
//
// - bool -> boolean
//
// - string -> string
//
// - int (all variants) -> number
//
// - float (all variants) -> number
//
// - map[string]MyStruct -> { type: object, additionalProperties: {}}
// for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#additional-properties
//
// - []MyStruct -> {type: array, items: {}}
// for details visit: https://json-schema.org/understanding-json-schema/reference/array.html#items
//
// - MyStruct -> {type: object, properties: {}, additionalProperties: false}
// for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#properties
func New(golangType reflect.Type, docs *Docs) (*jsonschema.Schema, error) {
tracker := newTracker()
schema, err := safeToSchema(golangType, docs, "", tracker)
if err != nil {
return nil, tracker.errWithTrace(err.Error(), "root")
}
return schema, nil
}
func jsonSchemaType(golangType reflect.Type) (jsonschema.Type, error) {
switch golangType.Kind() {
case reflect.Bool:
return jsonschema.BooleanType, nil
case reflect.String:
return jsonschema.StringType, nil
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64,
reflect.Float32, reflect.Float64:
return jsonschema.NumberType, nil
case reflect.Struct:
return jsonschema.ObjectType, nil
case reflect.Map:
if golangType.Key().Kind() != reflect.String {
return jsonschema.InvalidType, fmt.Errorf("only string map keys are valid. key type: %v", golangType.Key().Kind())
}
return jsonschema.ObjectType, nil
case reflect.Array, reflect.Slice:
return jsonschema.ArrayType, nil
default:
return jsonschema.InvalidType, fmt.Errorf("unhandled golang type: %s", golangType)
}
}
// A wrapper over toSchema function to:
// 1. Detect cycles in the bundle config struct.
// 2. Update tracker
//
// params:
//
// - golangType: Golang type to generate json schema for
//
// - docs: Contains documentation to be injected into the generated json schema
//
// - traceId: An identifier for the current type, to trace recursive traversal.
// Its value is the first json tag in case of struct fields and "" in other cases
// like array, map or no json tags
//
// - tracker: Keeps track of types / traceIds seen during recursive traversal
func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*jsonschema.Schema, error) {
// HACK to unblock CLI release (13th Feb 2024). This is temporary until proper
// support for recursive types is added to the schema generator. PR: https://github.com/databricks/cli/pull/1204
if traceId == "for_each_task" {
return &jsonschema.Schema{
Type: jsonschema.ObjectType,
}, nil
}
// WE ERROR OUT IF THERE ARE CYCLES IN THE JSON SCHEMA
// There are mechanisms to deal with cycles through recursive identifiers in json
// schema. However, if we use them, we would need to make sure we are able to detect
// cycles where two properties (directly or indirectly) point to each other
//
// see: https://json-schema.org/understanding-json-schema/structuring.html#recursion
// for details
if tracker.hasCycle(golangType) {
return nil, fmt.Errorf("cycle detected")
}
tracker.push(golangType, traceId)
props, err := toSchema(golangType, docs, tracker)
if err != nil {
return nil, err
}
tracker.pop(golangType)
return props, nil
}
// This function returns all member fields of the provided type.
// If the type has embedded (aka anonymous) fields, this function traverses
// those in a breadth first manner
func getStructFields(golangType reflect.Type) []reflect.StructField {
fields := []reflect.StructField{}
bfsQueue := list.New()
for i := 0; i < golangType.NumField(); i++ {
bfsQueue.PushBack(golangType.Field(i))
}
for bfsQueue.Len() > 0 {
front := bfsQueue.Front()
field := front.Value.(reflect.StructField)
bfsQueue.Remove(front)
if !field.Anonymous {
fields = append(fields, field)
continue
}
fieldType := field.Type
if fieldType.Kind() == reflect.Pointer {
fieldType = fieldType.Elem()
}
for i := 0; i < fieldType.NumField(); i++ {
bfsQueue.PushBack(fieldType.Field(i))
}
}
return fields
}
func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschema.Schema, error) {
// *Struct and Struct generate identical json schemas
if golangType.Kind() == reflect.Pointer {
return safeToSchema(golangType.Elem(), docs, "", tracker)
}
if golangType.Kind() == reflect.Interface {
return &jsonschema.Schema{}, nil
}
rootJavascriptType, err := jsonSchemaType(golangType)
if err != nil {
return nil, err
}
jsonSchema := &jsonschema.Schema{Type: rootJavascriptType}
// If the type is a non-string primitive, then we allow it to be a string
// provided it's a pure variable reference (ie only a single variable reference).
if rootJavascriptType == jsonschema.BooleanType || rootJavascriptType == jsonschema.NumberType {
jsonSchema = &jsonschema.Schema{
AnyOf: []*jsonschema.Schema{
{
Type: rootJavascriptType,
},
{
Type: jsonschema.StringType,
Pattern: dynvar.VariableRegex,
},
},
}
}
if docs != nil {
jsonSchema.Description = docs.Description
}
// case array/slice
if golangType.Kind() == reflect.Array || golangType.Kind() == reflect.Slice {
elemGolangType := golangType.Elem()
elemJavascriptType, err := jsonSchemaType(elemGolangType)
if err != nil {
return nil, err
}
var childDocs *Docs
if docs != nil {
childDocs = docs.Items
}
elemProps, err := safeToSchema(elemGolangType, childDocs, "", tracker)
if err != nil {
return nil, err
}
jsonSchema.Items = &jsonschema.Schema{
Type: elemJavascriptType,
Properties: elemProps.Properties,
AdditionalProperties: elemProps.AdditionalProperties,
Items: elemProps.Items,
Required: elemProps.Required,
}
}
// case map
if golangType.Kind() == reflect.Map {
if golangType.Key().Kind() != reflect.String {
return nil, fmt.Errorf("only string keyed maps allowed")
}
var childDocs *Docs
if docs != nil {
childDocs = docs.AdditionalProperties
}
jsonSchema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker)
if err != nil {
return nil, err
}
}
// case struct
if golangType.Kind() == reflect.Struct {
children := getStructFields(golangType)
properties := map[string]*jsonschema.Schema{}
required := []string{}
for _, child := range children {
bundleTag := child.Tag.Get("bundle")
// Fields marked as "readonly", "internal" or "deprecated" are skipped
// while generating the schema
if bundleTag == readonlyTag || bundleTag == internalTag || bundleTag == deprecatedTag {
continue
}
// get child json tags
childJsonTag := strings.Split(child.Tag.Get("json"), ",")
childName := childJsonTag[0]
// skip children that have no json tags, the first json tag is ""
// or the first json tag is "-"
if childName == "" || childName == "-" {
continue
}
// get docs for the child if they exist
var childDocs *Docs
if docs != nil {
if val, ok := docs.Properties[childName]; ok {
childDocs = val
}
}
// compute if the child is a required field. Determined by the
// presence of "omitempty" in the json tags
hasOmitEmptyTag := false
for i := 1; i < len(childJsonTag); i++ {
if childJsonTag[i] == "omitempty" {
hasOmitEmptyTag = true
}
}
if !hasOmitEmptyTag {
required = append(required, childName)
}
// compute Schema.Properties for the child recursively
fieldProps, err := safeToSchema(child.Type, childDocs, childName, tracker)
if err != nil {
return nil, err
}
properties[childName] = fieldProps
}
jsonSchema.AdditionalProperties = false
jsonSchema.Properties = properties
jsonSchema.Required = required
}
return jsonSchema, nil
}

File diff suppressed because it is too large

View File

@ -1,11 +0,0 @@
package schema
import "github.com/databricks/cli/libs/jsonschema"
type Specification struct {
Components *Components `json:"components"`
}
type Components struct {
Schemas map[string]*jsonschema.Schema `json:"schemas,omitempty"`
}

View File

@ -1,53 +0,0 @@
package schema
import (
"container/list"
"fmt"
)
type tracker struct {
// Nodes encountered in current path during the recursive traversal. Used to
// check for cycles
seenNodes map[interface{}]struct{}
// List of node names encountered in order in current path during the recursive traversal.
// Used to hydrate errors with the path to the exact node where the error occurred.
//
// NOTE: node and node names can be the same
listOfNodes *list.List
}
func newTracker() *tracker {
return &tracker{
seenNodes: map[interface{}]struct{}{},
listOfNodes: list.New(),
}
}
func (t *tracker) errWithTrace(prefix string, initTrace string) error {
traceString := initTrace
curr := t.listOfNodes.Front()
for curr != nil {
if curr.Value.(string) != "" {
traceString += " -> " + curr.Value.(string)
}
curr = curr.Next()
}
return fmt.Errorf(prefix + ". traversal trace: " + traceString)
}
func (t *tracker) hasCycle(node interface{}) bool {
_, ok := t.seenNodes[node]
return ok
}
func (t *tracker) push(node interface{}, name string) {
t.seenNodes[node] = struct{}{}
t.listOfNodes.PushBack(name)
}
func (t *tracker) pop(nodeType interface{}) {
back := t.listOfNodes.Back()
t.listOfNodes.Remove(back)
delete(t.seenNodes, nodeType)
}

View File

@ -68,3 +68,23 @@ func TestComplexVariablesOverride(t *testing.T) {
require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"]) require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"])
require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.PolicyId) require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.PolicyId)
} }
func TestComplexVariablesOverrideWithMultipleFiles(t *testing.T) {
b, diags := loadTargetWithDiags("variables/complex_multiple_files", "dev")
require.Empty(t, diags)
diags = bundle.Apply(context.Background(), b, bundle.Seq(
mutator.SetVariables(),
mutator.ResolveVariableReferencesInComplexVariables(),
mutator.ResolveVariableReferences(
"variables",
),
))
require.NoError(t, diags.Error())
for _, cluster := range b.Config.Resources.Jobs["my_job"].JobClusters {
require.Equalf(t, "14.2.x-scala2.11", cluster.NewCluster.SparkVersion, "cluster: %v", cluster.JobClusterKey)
require.Equalf(t, "Standard_DS3_v2", cluster.NewCluster.NodeTypeId, "cluster: %v", cluster.JobClusterKey)
require.Equalf(t, 4, cluster.NewCluster.NumWorkers, "cluster: %v", cluster.JobClusterKey)
require.Equalf(t, "false", cluster.NewCluster.SparkConf["spark.speculation"], "cluster: %v", cluster.JobClusterKey)
}
}

View File

@ -1,6 +1,9 @@
bundle: bundle:
name: python-wheel-local name: python-wheel-local
workspace:
artifact_path: /foo/bar
resources: resources:
jobs: jobs:
test_job: test_job:

View File

@ -15,11 +15,10 @@ import (
) )
func TestPythonWheelBuild(t *testing.T) { func TestPythonWheelBuild(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl")
@ -32,11 +31,10 @@ func TestPythonWheelBuild(t *testing.T) {
} }
func TestPythonWheelBuildAutoDetect(t *testing.T) { func TestPythonWheelBuildAutoDetect(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl")
@ -49,11 +47,10 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) {
} }
func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) { func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_notebook")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl") matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl")
@ -66,11 +63,10 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
} }
func TestPythonWheelWithDBFSLib(t *testing.T) { func TestPythonWheelWithDBFSLib(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_dbfs_lib")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
match := libraries.ExpandGlobReferences() match := libraries.ExpandGlobReferences()
@ -79,11 +75,11 @@ func TestPythonWheelWithDBFSLib(t *testing.T) {
} }
func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_artifact_no_setup")
require.NoError(t, err)
b.Config.Workspace.ArtifactPath = "/foo/bar" ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error())
mockFiler := mockfiler.NewMockFiler(t) mockFiler := mockfiler.NewMockFiler(t)
mockFiler.EXPECT().Write( mockFiler.EXPECT().Write(
@ -94,20 +90,20 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
filer.CreateParentDirectories, filer.CreateParentDirectories,
).Return(nil) ).Return(nil)
u := libraries.UploadWithClient(mockFiler) diags = bundle.Apply(ctx, b, bundle.Seq(
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build(), libraries.ExpandGlobReferences(), u)) libraries.ExpandGlobReferences(),
libraries.UploadWithClient(mockFiler),
))
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Empty(t, diags) require.Empty(t, diags)
require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl) require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl)
} }
func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) { func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/environment_key", "default")
b, err := bundle.Load(ctx, "./python_wheel/environment_key")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl") matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl")
@ -120,11 +116,10 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
} }
func TestPythonWheelBuildMultiple(t *testing.T) { func TestPythonWheelBuildMultiple(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_multiple")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl") matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl")
@ -137,11 +132,10 @@ func TestPythonWheelBuildMultiple(t *testing.T) {
} }
func TestPythonWheelNoBuild(t *testing.T) { func TestPythonWheelNoBuild(t *testing.T) {
ctx := context.Background() b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default")
b, err := bundle.Load(ctx, "./python_wheel/python_wheel_no_build")
require.NoError(t, err)
diags := bundle.Apply(ctx, b, bundle.Seq(phases.Load(), phases.Build())) ctx := context.Background()
diags := bundle.Apply(ctx, b, phases.Build())
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
match := libraries.ExpandGlobReferences() match := libraries.ExpandGlobReferences()

View File

@ -0,0 +1,52 @@
bundle:
name: complex-variables-multiple-files
resources:
jobs:
my_job:
job_clusters:
- job_cluster_key: key1
new_cluster: ${var.cluster1}
- job_cluster_key: key2
new_cluster: ${var.cluster2}
- job_cluster_key: key3
new_cluster: ${var.cluster3}
- job_cluster_key: key4
new_cluster: ${var.cluster4}
variables:
cluster1:
type: complex
description: "A cluster definition"
cluster2:
type: complex
description: "A cluster definition"
cluster3:
type: complex
description: "A cluster definition"
cluster4:
type: complex
description: "A cluster definition"
include:
- ./variables/*.yml
targets:
default:
dev:
variables:
cluster3:
spark_version: "14.2.x-scala2.11"
node_type_id: "Standard_DS3_v2"
num_workers: 4
spark_conf:
spark.speculation: false
spark.databricks.delta.retentionDurationCheck.enabled: false
cluster4:
default:
spark_version: "14.2.x-scala2.11"
node_type_id: "Standard_DS3_v2"
num_workers: 4
spark_conf:
spark.speculation: false
spark.databricks.delta.retentionDurationCheck.enabled: false

View File

@ -0,0 +1,19 @@
targets:
default:
dev:
variables:
cluster1:
spark_version: "14.2.x-scala2.11"
node_type_id: "Standard_DS3_v2"
num_workers: 4
spark_conf:
spark.speculation: false
spark.databricks.delta.retentionDurationCheck.enabled: false
cluster2:
default:
spark_version: "14.2.x-scala2.11"
node_type_id: "Standard_DS3_v2"
num_workers: 4
spark_conf:
spark.speculation: false
spark.databricks.delta.retentionDurationCheck.enabled: false

View File

@@ -124,8 +124,13 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) {
 	}, nil)

 	clustersApi := mockWorkspaceClient.GetMockClustersAPI()
-	clustersApi.EXPECT().GetByClusterName(mock.Anything, "some-test-cluster").Return(&compute.ClusterDetails{
-		ClusterId: "4321",
+	clustersApi.EXPECT().ListAll(mock.Anything, compute.ListClustersRequest{
+		FilterBy: &compute.ListClustersFilterBy{
+			ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
+		},
+	}).Return([]compute.ClusterDetails{
+		{ClusterId: "4321", ClusterName: "some-test-cluster"},
+		{ClusterId: "9876", ClusterName: "some-other-cluster"},
 	}, nil)

 	clusterPoliciesApi := mockWorkspaceClient.GetMockClusterPoliciesAPI()
@@ -19,7 +19,7 @@ import (

 func promptForProfile(ctx context.Context, defaultValue string) (string, error) {
 	if !cmdio.IsInTTY(ctx) {
-		return "", fmt.Errorf("the command is being run in a non-interactive environment, please specify a profile using --profile")
+		return "", nil
 	}

 	prompt := cmdio.Prompt(ctx)
@@ -152,6 +152,12 @@ func TestGenerateJobCommand(t *testing.T) {
 				},
 			},
 		},
+		Parameters: []jobs.JobParameterDefinition{
+			{
+				Name:    "empty",
+				Default: "",
+			},
+		},
 	},
 }, nil)
@@ -198,6 +204,9 @@ func TestGenerateJobCommand(t *testing.T) {
   - task_key: notebook_task
     notebook_task:
       notebook_path: %s
+parameters:
+  - name: empty
+    default: ""
 `, filepath.Join("..", "src", "notebook.py")), string(data))

 	data, err = os.ReadFile(filepath.Join(srcDir, "notebook.py"))
@@ -1,13 +1,8 @@
 package bundle

 import (
-	"encoding/json"
-	"reflect"
-
-	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/schema"
 	"github.com/databricks/cli/cmd/root"
-	"github.com/databricks/cli/libs/jsonschema"
 	"github.com/spf13/cobra"
 )
@@ -19,32 +14,8 @@ func newSchemaCommand() *cobra.Command {
 	}

 	cmd.RunE = func(cmd *cobra.Command, args []string) error {
-		// Load embedded schema descriptions.
-		docs, err := schema.LoadBundleDescriptions()
-		if err != nil {
-			return err
-		}
-
-		// Generate the JSON schema from the bundle configuration struct in Go.
-		schema, err := schema.New(reflect.TypeOf(config.Root{}), docs)
-		if err != nil {
-			return err
-		}
-
-		// Target variable value overrides can be primitives, maps or sequences.
-		// Set an empty schema for them.
-		err = schema.SetByPath("targets.*.variables.*", jsonschema.Schema{})
-		if err != nil {
-			return err
-		}
-
-		// Print the JSON schema to stdout.
-		result, err := json.MarshalIndent(schema, "", " ")
-		if err != nil {
-			return err
-		}
-		cmd.OutOrStdout().Write(result)
-		return nil
+		_, err := cmd.OutOrStdout().Write(schema.Bytes)
+		return err
 	}

 	return cmd
@@ -29,6 +29,12 @@ func (f *progressLoggerFlag) resolveModeDefault(format flags.ProgressLogFormat)
 }

 func (f *progressLoggerFlag) initializeContext(ctx context.Context) (context.Context, error) {
+	// No need to initialize the logger if it's already set in the context. This
+	// happens in unit tests where the logger is setup as a fixture.
+	if _, ok := cmdio.FromContext(ctx); ok {
+		return ctx, nil
+	}
+
 	if f.log.level.String() != "disabled" && f.log.file.String() == "stderr" &&
 		f.ProgressLogFormat == flags.ModeInplace {
 		return nil, fmt.Errorf("inplace progress logging cannot be used when log-file is stderr")
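Editor's note: the guard added above skips logger initialization when a logger is already attached to the context, as test fixtures do. A minimal standalone sketch of the same check-before-initialize pattern, using only the standard library and hypothetical names rather than the CLI's cmdio package:

```go
package main

import "context"

type ctxKey struct{}

// fromContext reports whether a logger-like value is already attached to ctx.
func fromContext(ctx context.Context) (string, bool) {
	v, ok := ctx.Value(ctxKey{}).(string)
	return v, ok
}

// initializeContext attaches a default value only if none is present,
// mirroring the early return introduced in the hunk above.
func initializeContext(ctx context.Context) context.Context {
	if _, ok := fromContext(ctx); ok {
		return ctx // already initialized, e.g. by a test fixture
	}
	return context.WithValue(ctx, ctxKey{}, "default-logger")
}

func main() {
	ctx := context.WithValue(context.Background(), ctxKey{}, "fixture-logger")
	_ = initializeContext(ctx) // leaves the fixture value untouched
}
```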
@@ -941,7 +941,12 @@ func newListArtifacts() *cobra.Command {
 	cmd.Long = `Get all artifacts.
 
   List artifacts for a run. Takes an optional artifact_path prefix. If it is
-  specified, the response contains only artifacts with the specified prefix.",`
+  specified, the response contains only artifacts with the specified prefix.
+  This API does not support pagination when listing artifacts in UC Volumes. A
+  maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+  /api/2.0/fs/directories{directory_path} for listing artifacts in UC Volumes,
+  which supports pagination. See [List directory contents | Files
+  API](/api/workspace/files/listdirectorycontents).`

 	cmd.Annotations = make(map[string]string)
@@ -88,7 +88,9 @@ func newAssign() *cobra.Command {
   Arguments:
     WORKSPACE_ID: A workspace ID.
     METASTORE_ID: The unique ID of the metastore.
-    DEFAULT_CATALOG_NAME: The name of the default catalog in the metastore.`
+    DEFAULT_CATALOG_NAME: The name of the default catalog in the metastore. This field is depracted.
+      Please use "Default Namespace API" to configure the default catalog for a
+      Databricks workspace.`

 	cmd.Annotations = make(map[string]string)
@@ -665,7 +667,7 @@ func newUpdateAssignment() *cobra.Command {
 	// TODO: short flags
 	cmd.Flags().Var(&updateAssignmentJson, "json", `either inline JSON string or @path/to/file.json with request body`)

-	cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`)
+	cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog in the metastore.`)
 	cmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`)

 	cmd.Use = "update-assignment WORKSPACE_ID"
@@ -117,9 +117,10 @@ func newGet() *cobra.Command {
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`

 	cmd.Annotations = make(map[string]string)
@@ -245,9 +246,10 @@ func newSet() *cobra.Command {
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`

 	cmd.Annotations = make(map[string]string)
@@ -319,9 +321,10 @@ func newUpdate() *cobra.Command {
   Arguments:
     REQUEST_OBJECT_TYPE: The type of the request object. Can be one of the following: alerts,
-      authorization, clusters, cluster-policies, dbsql-dashboards, directories,
-      experiments, files, instance-pools, jobs, notebooks, pipelines, queries,
-      registered-models, repos, serving-endpoints, or warehouses.
+      authorization, clusters, cluster-policies, dashboards, dbsql-dashboards,
+      directories, experiments, files, instance-pools, jobs, notebooks,
+      pipelines, queries, registered-models, repos, serving-endpoints, or
+      warehouses.
     REQUEST_OBJECT_ID: The id of the request object.`

 	cmd.Annotations = make(map[string]string)
@@ -41,6 +41,7 @@ func New() *cobra.Command {
 	cmd.AddCommand(newGet())
 	cmd.AddCommand(newGetRefresh())
 	cmd.AddCommand(newListRefreshes())
+	cmd.AddCommand(newRegenerateDashboard())
 	cmd.AddCommand(newRunRefresh())
 	cmd.AddCommand(newUpdate())
@@ -503,6 +504,87 @@
 	return cmd
 }

+// start regenerate-dashboard command
+
+// Slice with functions to override default command behavior.
+// Functions can be added from the `init()` function in manually curated files in this directory.
+var regenerateDashboardOverrides []func(
+	*cobra.Command,
+	*catalog.RegenerateDashboardRequest,
+)
+
+func newRegenerateDashboard() *cobra.Command {
+	cmd := &cobra.Command{}
+
+	var regenerateDashboardReq catalog.RegenerateDashboardRequest
+	var regenerateDashboardJson flags.JsonFlag
+
+	// TODO: short flags
+	cmd.Flags().Var(&regenerateDashboardJson, "json", `either inline JSON string or @path/to/file.json with request body`)
+
+	cmd.Flags().StringVar(&regenerateDashboardReq.WarehouseId, "warehouse-id", regenerateDashboardReq.WarehouseId, `Optional argument to specify the warehouse for dashboard regeneration.`)
+
+	cmd.Use = "regenerate-dashboard TABLE_NAME"
+	cmd.Short = `Regenerate a monitoring dashboard.`
+	cmd.Long = `Regenerate a monitoring dashboard.
+
+  Regenerates the monitoring dashboard for the specified table.
+
+  The caller must either: 1. be an owner of the table's parent catalog 2. have
+  **USE_CATALOG** on the table's parent catalog and be an owner of the table's
+  parent schema 3. have the following permissions: - **USE_CATALOG** on the
+  table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+  owner of the table
+
+  The call must be made from the workspace where the monitor was created. The
+  dashboard will be regenerated in the assets directory that was specified when
+  the monitor was created.
+
+  Arguments:
+    TABLE_NAME: Full name of the table.`
+
+	// This command is being previewed; hide from help output.
+	cmd.Hidden = true
+
+	cmd.Annotations = make(map[string]string)
+
+	cmd.Args = func(cmd *cobra.Command, args []string) error {
+		check := root.ExactArgs(1)
+		return check(cmd, args)
+	}
+
+	cmd.PreRunE = root.MustWorkspaceClient
+	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
+		ctx := cmd.Context()
+		w := root.WorkspaceClient(ctx)
+
+		if cmd.Flags().Changed("json") {
+			err = regenerateDashboardJson.Unmarshal(&regenerateDashboardReq)
+			if err != nil {
+				return err
+			}
+		}
+		regenerateDashboardReq.TableName = args[0]
+
+		response, err := w.QualityMonitors.RegenerateDashboard(ctx, regenerateDashboardReq)
+		if err != nil {
+			return err
+		}
+		return cmdio.Render(ctx, response)
+	}
+
+	// Disable completions since they are not applicable.
+	// Can be overridden by manual implementation in `override.go`.
+	cmd.ValidArgsFunction = cobra.NoFileCompletions
+
+	// Apply optional overrides to this command.
+	for _, fn := range regenerateDashboardOverrides {
+		fn(cmd, &regenerateDashboardReq)
+	}
+
+	return cmd
+}
+
 // start run-refresh command

 // Slice with functions to override default command behavior.
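Editor's note: as the generated comments above describe, the regenerateDashboardOverrides slice lets manually curated files adjust the generated command from an init() function. A hedged sketch of what such an override could look like; the file name, package name, and the specific tweak are assumptions for illustration, not part of this commit:

```go
// Hypothetical cmd/workspace/quality-monitors/overrides.go
package quality_monitors

import (
	"github.com/databricks/databricks-sdk-go/service/catalog"
	"github.com/spf13/cobra"
)

func init() {
	regenerateDashboardOverrides = append(regenerateDashboardOverrides, func(
		cmd *cobra.Command,
		req *catalog.RegenerateDashboardRequest,
	) {
		// Example tweak: surface the command in help output once it leaves preview.
		cmd.Hidden = false
	})
}
```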
go.mod
@@ -3,9 +3,9 @@ module github.com/databricks/cli
 go 1.22

 require (
-	github.com/Masterminds/semver/v3 v3.2.1 // MIT
+	github.com/Masterminds/semver/v3 v3.3.0 // MIT
 	github.com/briandowns/spinner v1.23.1 // Apache 2.0
-	github.com/databricks/databricks-sdk-go v0.45.0 // Apache 2.0
+	github.com/databricks/databricks-sdk-go v0.46.0 // Apache 2.0
 	github.com/fatih/color v1.17.0 // MIT
 	github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
 	github.com/google/uuid v1.6.0 // BSD-3-Clause
@@ -23,10 +23,10 @@ require (
 	github.com/stretchr/testify v1.9.0 // MIT
 	golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
 	golang.org/x/mod v0.20.0
-	golang.org/x/oauth2 v0.22.0
+	golang.org/x/oauth2 v0.23.0
 	golang.org/x/sync v0.8.0
-	golang.org/x/term v0.23.0
-	golang.org/x/text v0.17.0
+	golang.org/x/term v0.24.0
+	golang.org/x/text v0.18.0
 	gopkg.in/ini.v1 v1.67.0 // Apache 2.0
 	gopkg.in/yaml.v3 v3.0.1
 )
@@ -61,7 +61,7 @@ require (
 	go.opentelemetry.io/otel/trace v1.24.0 // indirect
 	golang.org/x/crypto v0.24.0 // indirect
 	golang.org/x/net v0.26.0 // indirect
-	golang.org/x/sys v0.23.0 // indirect
+	golang.org/x/sys v0.25.0 // indirect
 	golang.org/x/time v0.5.0 // indirect
 	google.golang.org/api v0.182.0 // indirect
 	google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect
go.sum (generated)
@@ -8,8 +8,8 @@ cloud.google.com/go/compute/metadata v0.3.0/go.mod h1:zFmK7XCadkQkj6TtorcaGlCW1h
 dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk=
 dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0=
-github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Masterminds/semver/v3 v3.3.0 h1:B8LGeaivUe71a5qox1ICM/JLl0NqZSW5CHyL+hmvYS0=
+github.com/Masterminds/semver/v3 v3.3.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
 github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
 github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
 github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg=
@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
 github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
 github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.45.0 h1:wdx5Wm/ESrahdHeq62WrjLeGjV4r722LLanD8ahI0Mo=
-github.com/databricks/databricks-sdk-go v0.45.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
+github.com/databricks/databricks-sdk-go v0.46.0 h1:D0TxmtSVAOsdnfzH4OGtAmcq+8TyA7Z6fA6JEYhupeY=
+github.com/databricks/databricks-sdk-go v0.46.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
 github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -191,8 +191,8 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY
 golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ=
 golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE=
 golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
-golang.org/x/oauth2 v0.22.0 h1:BzDx2FehcG7jJwgWLELCdmLuxk2i+x9UDpSiss2u0ZA=
-golang.org/x/oauth2 v0.22.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.23.0 h1:PbgcYx2W7i4LvjJWEbf0ngHV6qJYr86PkAV3bXdLEbs=
+golang.org/x/oauth2 v0.23.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
 golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -208,14 +208,14 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
 golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
 golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.23.0 h1:YfKFowiIMvtgl1UERQoTPPToxltDeZfbj4H7dVUCwmM=
-golang.org/x/sys v0.23.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
-golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
+golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
-golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
+golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
 golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
 golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
 golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
@@ -36,7 +36,8 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 	wsDir := internal.TemporaryWorkspaceDir(t, w)

 	b := &bundle.Bundle{
 		RootPath: dir,
+		SyncRootPath: dir,
 		Config: config.Root{
 			Bundle: config.Bundle{
 				Target: "whatever",
@@ -100,7 +101,8 @@ func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T)
 	wsDir := internal.TemporaryWorkspaceDir(t, w)

 	b := &bundle.Bundle{
 		RootPath: dir,
+		SyncRootPath: dir,
 		Config: config.Root{
 			Bundle: config.Bundle{
 				Target: "whatever",
@@ -169,7 +171,8 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
 	touchEmptyFile(t, whlPath)

 	b := &bundle.Bundle{
 		RootPath: dir,
+		SyncRootPath: dir,
 		Config: config.Root{
 			Bundle: config.Bundle{
 				Target: "whatever",
@@ -11,6 +11,7 @@ import (
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/google/uuid"
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -101,12 +102,15 @@ func TestAccAbortBind(t *testing.T) {
 		destroyBundle(t, ctx, bundleRoot)
 	})

+	// Bind should fail because prompting is not possible.
 	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	t.Setenv("TERM", "dumb")
 	c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId))

-	// Simulate user aborting the bind. This is done by not providing any input to the prompt in non-interactive mode.
+	// Expect error suggesting to use --auto-approve
 	_, _, err = c.Run()
-	require.ErrorContains(t, err, "failed to bind the resource")
+	assert.ErrorContains(t, err, "failed to bind the resource")
+	assert.ErrorContains(t, err, "This bind operation requires user confirmation, but the current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")

 	err = deployBundle(t, ctx, bundleRoot)
 	require.NoError(t, err)
@@ -0,0 +1,8 @@
{
  "properties": {
    "unique_id": {
      "type": "string",
      "description": "Unique ID for the schema and pipeline names"
    }
  }
}
@@ -0,0 +1,25 @@
bundle:
  name: "bundle-playground"

variables:
  catalog:
    description: The catalog the DLT pipeline should use.
    default: main

resources:
  pipelines:
    foo:
      name: test-pipeline-{{.unique_id}}
      libraries:
        - notebook:
            path: ./nb.sql
      development: true
      catalog: ${var.catalog}

include:
  - "*.yml"

targets:
  development:
    default: true
@@ -0,0 +1,2 @@
-- Databricks notebook source
select 1
@@ -120,8 +120,97 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) {
 	t.Setenv("BUNDLE_ROOT", bundleRoot)
 	t.Setenv("TERM", "dumb")
 	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
-	stdout, _, err := c.Run()
+	stdout, stderr, err := c.Run()
 	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
+	assert.Contains(t, stderr.String(), "The following UC schemas will be deleted or recreated. Any underlying data may be lost:\n delete schema bar")
+	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
+}
+
+func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {
+	ctx, wt := acc.WorkspaceTest(t)
+	w := wt.W
+
+	nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
+	uniqueId := uuid.New().String()
+	bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
+		"unique_id":     uniqueId,
+		"node_type_id":  nodeTypeId,
+		"spark_version": defaultSparkVersion,
+	})
+	require.NoError(t, err)
+
+	// deploy pipeline
+	err = deployBundle(t, ctx, bundleRoot)
+	require.NoError(t, err)
+
+	// assert pipeline is created
+	pipelineName := "test-bundle-pipeline-" + uniqueId
+	pipeline, err := w.Pipelines.GetByName(ctx, pipelineName)
+	require.NoError(t, err)
+	assert.Equal(t, pipeline.Name, pipelineName)
+
+	// assert job is created
+	jobName := "test-bundle-job-" + uniqueId
+	job, err := w.Jobs.GetBySettingsName(ctx, jobName)
+	require.NoError(t, err)
+	assert.Equal(t, job.Settings.Name, jobName)
+
+	// delete resources.yml
+	err = os.Remove(filepath.Join(bundleRoot, "resources.yml"))
+	require.NoError(t, err)
+
+	// Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve.
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	t.Setenv("TERM", "dumb")
+	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock")
+	stdout, stderr, err := c.Run()
+
+	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
+	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
+Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
+restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
+properties such as the 'catalog' or 'storage' are changed:
+ delete pipeline bar`)
+	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
+}
+
+func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
+	ctx, wt := acc.UcWorkspaceTest(t)
+	w := wt.W
+	uniqueId := uuid.New().String()
+
+	bundleRoot, err := initTestTemplate(t, ctx, "recreate_pipeline", map[string]any{
+		"unique_id": uniqueId,
+	})
+	require.NoError(t, err)
+
+	err = deployBundle(t, ctx, bundleRoot)
+	require.NoError(t, err)
+
+	t.Cleanup(func() {
+		destroyBundle(t, ctx, bundleRoot)
+	})
+
+	// Assert the pipeline is created
+	pipelineName := "test-pipeline-" + uniqueId
+	pipeline, err := w.Pipelines.GetByName(ctx, pipelineName)
+	require.NoError(t, err)
+	require.Equal(t, pipelineName, pipeline.Name)
+
+	// Redeploy the bundle, pointing the DLT pipeline to a different UC catalog.
+	t.Setenv("BUNDLE_ROOT", bundleRoot)
+	t.Setenv("TERM", "dumb")
+	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--var=\"catalog=whatever\"")
+	stdout, stderr, err := c.Run()
+
+	assert.EqualError(t, err, root.ErrAlreadyPrinted.Error())
+	assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following DLT Pipelines along with the
+Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
+restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
+properties such as the 'catalog' or 'storage' are changed:
+ recreate pipeline foo`)
 	assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")
 }
@@ -267,6 +267,8 @@ func (n normalizeOptions) normalizeString(typ reflect.Type, src dyn.Value, path
 		out = strconv.FormatInt(src.MustInt(), 10)
 	case dyn.KindFloat:
 		out = strconv.FormatFloat(src.MustFloat(), 'f', -1, 64)
+	case dyn.KindTime:
+		out = src.MustTime().String()
 	case dyn.KindNil:
 		// Return a warning if the field is present but has a null value.
 		return dyn.InvalidValue, diags.Append(nullWarning(dyn.KindString, src, path))
@@ -569,6 +569,14 @@ func TestNormalizeStringFromFloat(t *testing.T) {
 	assert.Equal(t, dyn.NewValue("1.2", vin.Locations()), vout)
 }

+func TestNormalizeStringFromTime(t *testing.T) {
+	var typ string
+	vin := dyn.NewValue(dyn.MustTime("2024-08-29"), []dyn.Location{{File: "file", Line: 1, Column: 1}})
+	vout, err := Normalize(&typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, dyn.NewValue("2024-08-29", vin.Locations()), vout)
+}
+
 func TestNormalizeStringError(t *testing.T) {
 	var typ string
 	vin := dyn.V(map[string]dyn.Value{"an": dyn.V("error")})
@@ -6,9 +6,7 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 )

-const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`
-
-var re = regexp.MustCompile(VariableRegex)
+var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)

 // ref represents a variable reference.
 // It is a string [dyn.Value] contained in a larger [dyn.Value].
@@ -2,7 +2,6 @@ package dyn

 import (
 	"fmt"
-	"time"
 )

 type Kind int
@@ -34,7 +33,7 @@ func kindOf(v any) Kind {
 		return KindInt
 	case float32, float64:
 		return KindFloat
-	case time.Time:
+	case Time:
 		return KindTime
 	case nil:
 		return KindNil
@@ -83,16 +83,16 @@ func TestOverride_Primitive(t *testing.T) {
 		{
 			name:  "time (updated)",
 			state: visitorState{updated: []string{"root"}},
-			left:     dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
-			right:    dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
-			expected: dyn.NewValue(time.UnixMilli(10001), []dyn.Location{rightLocation}),
+			left:     dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
+			right:    dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
+			expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10001)), []dyn.Location{rightLocation}),
 		},
 		{
 			name:  "time (not updated)",
 			state: visitorState{},
-			left:     dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
-			right:    dyn.NewValue(time.UnixMilli(10000), []dyn.Location{rightLocation}),
-			expected: dyn.NewValue(time.UnixMilli(10000), []dyn.Location{leftLocation}),
+			left:     dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
+			right:    dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{rightLocation}),
+			expected: dyn.NewValue(dyn.FromTime(time.UnixMilli(10000)), []dyn.Location{leftLocation}),
 		},
 		{
 			name: "different types (updated)",
libs/dyn/time.go (new file)
@@ -0,0 +1,62 @@
package dyn

import (
	"fmt"
	"time"
)

// Time represents a time-like primitive value.
//
// It represents a timestamp and includes the original string value
// that was parsed to create the timestamp. This makes it possible
// to coalesce a value that YAML interprets as a timestamp back into
// a string without losing information.
type Time struct {
	t time.Time
	s string
}

// NewTime creates a new Time from the given string.
func NewTime(str string) (Time, error) {
	// Try a couple of layouts
	for _, layout := range []string{
		"2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
		"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
		"2006-1-2 15:4:5.999999999",       // space separated with no time zone
		"2006-1-2",                        // date only
	} {
		t, terr := time.Parse(layout, str)
		if terr == nil {
			return Time{t: t, s: str}, nil
		}
	}
	return Time{}, fmt.Errorf("invalid time value: %q", str)
}

// MustTime creates a new Time from the given string.
// It panics if the string cannot be parsed.
func MustTime(str string) Time {
	t, err := NewTime(str)
	if err != nil {
		panic(err)
	}
	return t
}

// FromTime creates a new Time from the given time.Time.
// It uses the RFC3339Nano format for its string representation.
// This guarantees that it can roundtrip into a string without losing information.
func FromTime(t time.Time) Time {
	return Time{t: t, s: t.Format(time.RFC3339Nano)}
}

// Time returns the time.Time value.
func (t Time) Time() time.Time {
	return t.t
}

// String returns the original string value that was parsed to create the timestamp.
func (t Time) String() string {
	return t.s
}
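Editor's note: the doc comment above explains why the original string is kept alongside the parsed timestamp. A small standalone sketch (standard library only, not part of the commit) of the roundtrip loss the wrapper avoids: a date-only scalar parsed into a time.Time no longer formats back to the exact input string.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// "2024-08-29" is a valid date-only scalar...
	parsed, err := time.Parse("2006-1-2", "2024-08-29")
	if err != nil {
		panic(err)
	}
	// ...but re-formatting it yields a different string, so writing the value
	// back out (e.g. to YAML) would not roundtrip without the original string.
	fmt.Println(parsed.Format(time.RFC3339Nano)) // 2024-08-29T00:00:00Z
}
```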
libs/dyn/time_test.go (new file)
@@ -0,0 +1,41 @@
package dyn_test

import (
	"testing"
	"time"

	"github.com/databricks/cli/libs/dyn"
	assert "github.com/databricks/cli/libs/dyn/dynassert"
)

func TestTimeValid(t *testing.T) {
	for _, tc := range []string{
		"2024-08-29",
		"2024-01-15T12:34:56.789012345Z",
	} {
		tm, err := dyn.NewTime(tc)
		if assert.NoError(t, err) {
			assert.NotEqual(t, time.Time{}, tm.Time())
			assert.Equal(t, tc, tm.String())
		}
	}
}

func TestTimeInvalid(t *testing.T) {
	tm, err := dyn.NewTime("invalid")
	assert.Error(t, err)
	assert.Equal(t, dyn.Time{}, tm)
}

func TestTimeFromTime(t *testing.T) {
	tref := time.Now()
	t1 := dyn.FromTime(tref)

	// Verify that the underlying value is the same.
	assert.Equal(t, tref, t1.Time())

	// Verify that the string representation can be used to construct the same.
	t2, err := dyn.NewTime(t1.String())
	assert.NoError(t, err)
	assert.True(t, t1.Time().Equal(t2.Time()))
}
@@ -127,7 +127,8 @@ func (v Value) AsAny() any {
 	case KindFloat:
 		return v.v
 	case KindTime:
-		return v.v
+		t := v.v.(Time)
+		return t.Time()
 	default:
 		// Panic because we only want to deal with known types.
 		panic(fmt.Sprintf("invalid kind: %d", v.k))
@@ -2,7 +2,6 @@ package dyn

 import (
 	"fmt"
-	"time"
 )

 // AsMap returns the underlying mapping if this value is a map,
@@ -123,14 +122,14 @@ func (v Value) MustFloat() float64 {

 // AsTime returns the underlying time if this value is a time,
 // the zero value and false otherwise.
-func (v Value) AsTime() (time.Time, bool) {
-	vv, ok := v.v.(time.Time)
+func (v Value) AsTime() (Time, bool) {
+	vv, ok := v.v.(Time)
 	return vv, ok
 }

 // MustTime returns the underlying time if this value is a time,
 // panics otherwise.
-func (v Value) MustTime() time.Time {
+func (v Value) MustTime() Time {
 	vv, ok := v.AsTime()
 	if !ok || v.k != KindTime {
 		panic(fmt.Sprintf("expected kind %s, got %s", KindTime, v.k))
@@ -143,7 +143,7 @@ func TestValueUnderlyingFloat(t *testing.T) {
 }

 func TestValueUnderlyingTime(t *testing.T) {
-	v := dyn.V(time.Now())
+	v := dyn.V(dyn.FromTime(time.Now()))

 	vv1, ok := v.AsTime()
 	assert.True(t, ok)
@@ -70,7 +70,7 @@ type visitOptions struct {

 func visit(v Value, prefix Path, suffix Pattern, opts visitOptions) (Value, error) {
 	if len(suffix) == 0 {
-		return opts.fn(prefix, v)
+		return opts.fn(slices.Clone(prefix), v)
 	}

 	// Initialize prefix if it is empty.
@@ -21,7 +21,7 @@ func Foreach(fn MapFunc) MapFunc {
 		for _, pair := range m.Pairs() {
 			pk := pair.Key
 			pv := pair.Value
-			nv, err := fn(append(p, Key(pk.MustString())), pv)
+			nv, err := fn(p.Append(Key(pk.MustString())), pv)
 			if err != nil {
 				return InvalidValue, err
 			}
@@ -32,7 +32,7 @@ func Foreach(fn MapFunc) MapFunc {
 		s := slices.Clone(v.MustSequence())
 		for i, value := range s {
 			var err error
-			s[i], err = fn(append(p, Index(i)), value)
+			s[i], err = fn(p.Append(Index(i)), value)
 			if err != nil {
 				return InvalidValue, err
 			}
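Editor's note: the hunks above replace a bare append on the shared path prefix with slices.Clone and Path.Append. A standalone sketch (not from the patch) of the aliasing hazard this avoids when a slice with spare capacity is appended to more than once:

```go
package main

import "fmt"

func main() {
	// A prefix with spare capacity: length 1, capacity 2.
	prefix := make([]string, 1, 2)
	prefix[0] = "root"

	a := append(prefix, "a")
	b := append(prefix, "b") // reuses the same backing array...

	fmt.Println(a, b) // [root b] [root b] — the second append clobbered the first
}
```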
Some files were not shown because too many files have changed in this diff.