mirror of https://github.com/databricks/cli.git
Merge branch 'main' into patch-1
commit cfd953697f
@@ -1 +1 @@
-37b925eba37dfb3d7e05b6ba2d458454ce62d3a0
+7437dabb9dadee402c1fc060df4c1ce8cc5369f0
@@ -7,7 +7,7 @@ package account

 import (
 	"github.com/databricks/cli/cmd/root"
 	"github.com/spf13/cobra"
-{{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) }}
+{{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }}
 	{{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}}
 )
@@ -17,7 +17,7 @@ func New() *cobra.Command {
 		Short: `Databricks Account Commands`,
 	}

-	{{range .Services}}{{if and .IsAccounts (not .HasParent)}}{{if not (in $excludes .KebabName) -}}
+	{{range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}}
 	cmd.AddCommand({{.SnakeName}}.New())
 	{{end}}{{end}}{{end}}

@@ -14,14 +14,14 @@ package workspace

 import (
 	"github.com/databricks/cli/cmd/root"
-{{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) }}
+{{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) }}
 	{{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}}
 )

 func All() []*cobra.Command {
 	var out []*cobra.Command

-	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}{{if not (in $excludes .KebabName) -}}
+	{{range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}}{{if not (in $excludes .KebabName) -}}
 	out = append(out, {{.SnakeName}}.New())
 	{{end}}{{end}}{{end}}
@@ -22,6 +22,7 @@ import (
 	"dbsql-permissions"
 	"account-access-control-proxy"
 	"files"
+	"serving-endpoints-data-plane"
 }}

 {{if not (in $excludes .KebabName) }}
CHANGELOG.md
@@ -1,5 +1,53 @@
 # Version changelog

+## 0.222.0
+
+CLI:
+* Add link to documentation for Homebrew installation to README ([#1505](https://github.com/databricks/cli/pull/1505)).
+* Fix `databricks configure` to use the `DATABRICKS_CONFIG_FILE` environment variable, if set, as the config file ([#1325](https://github.com/databricks/cli/pull/1325)).
+
+Bundles:
+
+The Terraform upgrade to v1.48.0 includes a fix for library order not being respected.
+
+* Fix conditional in query in `default-sql` template ([#1479](https://github.com/databricks/cli/pull/1479)).
+* Remove user credentials specified in the Git origin URL ([#1494](https://github.com/databricks/cli/pull/1494)).
+* Serialize dynamic value for `bundle validate` output ([#1499](https://github.com/databricks/cli/pull/1499)).
+* Override variables with lookup value even if the variable has a default value set ([#1504](https://github.com/databricks/cli/pull/1504)).
+* Pause quality monitors when "mode: development" is used ([#1481](https://github.com/databricks/cli/pull/1481)).
+* Return `fs.ModeDir` for Git folders in the workspace ([#1521](https://github.com/databricks/cli/pull/1521)).
+* Upgrade TF provider to 1.48.0 ([#1527](https://github.com/databricks/cli/pull/1527)).
+* Added support for complex variables ([#1467](https://github.com/databricks/cli/pull/1467)).
+
+Internal:
+* Add randIntn function ([#1475](https://github.com/databricks/cli/pull/1475)).
+* Avoid multiple file tree traversals on bundle deploy ([#1493](https://github.com/databricks/cli/pull/1493)).
+* Clean up unused code ([#1502](https://github.com/databricks/cli/pull/1502)).
+* Use `dyn.InvalidValue` to indicate absence ([#1507](https://github.com/databricks/cli/pull/1507)).
+* Add ApplyPythonMutator ([#1430](https://github.com/databricks/cli/pull/1430)).
+* Set bool pointer to disable lock ([#1516](https://github.com/databricks/cli/pull/1516)).
+* Allow the any type to be set to nil in `convert.FromTyped` ([#1518](https://github.com/databricks/cli/pull/1518)).
+* Properly deal with nil values in `convert.FromTyped` ([#1511](https://github.com/databricks/cli/pull/1511)).
+* Return `dyn.InvalidValue` instead of `dyn.NilValue` when errors happen ([#1514](https://github.com/databricks/cli/pull/1514)).
+* PythonMutator: replace stdin/stdout with files ([#1512](https://github.com/databricks/cli/pull/1512)).
+* Add context type and value to path rewriting ([#1525](https://github.com/databricks/cli/pull/1525)).
+
+API Changes:
+* Added schedule CRUD commands to `databricks lakeview`.
+* Added subscription CRUD commands to `databricks lakeview`.
+* Added `databricks apps start` command.
+
+OpenAPI commit 7437dabb9dadee402c1fc060df4c1ce8cc5369f0 (2024-06-24)
+
+Dependency updates:
+* Bump golang.org/x/text from 0.15.0 to 0.16.0 ([#1482](https://github.com/databricks/cli/pull/1482)).
+* Bump golang.org/x/term from 0.20.0 to 0.21.0 ([#1483](https://github.com/databricks/cli/pull/1483)).
+* Bump golang.org/x/mod from 0.17.0 to 0.18.0 ([#1484](https://github.com/databricks/cli/pull/1484)).
+* Bump golang.org/x/oauth2 from 0.20.0 to 0.21.0 ([#1485](https://github.com/databricks/cli/pull/1485)).
+* Bump github.com/briandowns/spinner from 1.23.0 to 1.23.1 ([#1495](https://github.com/databricks/cli/pull/1495)).
+* Bump github.com/spf13/cobra from 1.8.0 to 1.8.1 ([#1496](https://github.com/databricks/cli/pull/1496)).
+* Bump github.com/databricks/databricks-sdk-go from 0.42.0 to 0.43.0 ([#1522](https://github.com/databricks/cli/pull/1522)).
+
 ## 0.221.1

 Bundles:
@@ -32,7 +32,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) dia
 		targets := v.Get("targets")

 		// Return an error if both "environments" and "targets" are set.
-		if environments != dyn.InvalidValue && targets != dyn.InvalidValue {
+		if environments.Kind() != dyn.KindInvalid && targets.Kind() != dyn.KindInvalid {
 			return dyn.InvalidValue, fmt.Errorf(
 				"both 'environments' and 'targets' are specified; only 'targets' should be used: %s",
 				environments.Location().String(),
@@ -40,7 +40,7 @@ func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) dia
 		}

 		// Rewrite "environments" to "targets".
-		if environments != dyn.InvalidValue && targets == dyn.InvalidValue {
+		if environments.Kind() != dyn.KindInvalid && targets.Kind() == dyn.KindInvalid {
 			nv, err := dyn.Set(v, "targets", environments)
 			if err != nil {
 				return dyn.InvalidValue, err
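A recurring change in this commit replaces direct comparison of a dyn.Value against the dyn.InvalidValue sentinel with a check of its Kind(). A minimal self-contained sketch (toy types, not the real github.com/databricks/cli/libs/dyn API) of why struct equality is fragile once values carry location metadata:

package main

import "fmt"

type Kind int

const (
	KindInvalid Kind = iota
	KindString
)

type Location struct {
	File string
	Line int
}

// Value is a toy stand-in for dyn.Value: a kind plus location metadata.
type Value struct {
	kind Kind
	loc  Location
}

func (v Value) Kind() Kind { return v.kind }

func main() {
	zero := Value{} // the "InvalidValue" sentinel: zero kind, zero location
	fromParser := Value{kind: KindInvalid, loc: Location{File: "bundle.yml", Line: 3}}

	fmt.Println(fromParser == zero)               // false: locations differ
	fmt.Println(fromParser.Kind() == KindInvalid) // true: the kind check is robust
}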
@@ -32,7 +32,7 @@ func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string {

 func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}

@@ -32,7 +32,7 @@ func (m *mergeJobTasks) taskKeyString(v dyn.Value) string {

 func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}

@@ -35,7 +35,7 @@ func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string {

 func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
-		if v == dyn.NilValue {
+		if v.Kind() == dyn.KindNil {
 			return v, nil
 		}

@@ -35,7 +35,7 @@ func TestResolveClusterReference(t *testing.T) {
 				},
 			},
 			"some-variable": {
-				Value: &justString,
+				Value: justString,
 			},
 		},
 	},
@@ -53,8 +53,8 @@ func TestResolveClusterReference(t *testing.T) {

 	diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
 	require.NoError(t, diags.Error())
-	require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value)
-	require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value)
+	require.Equal(t, "1234-5678-abcd", b.Config.Variables["my-cluster-id-1"].Value)
+	require.Equal(t, "9876-5432-xywz", b.Config.Variables["my-cluster-id-2"].Value)
 }

 func TestResolveNonExistentClusterReference(t *testing.T) {
@@ -69,7 +69,7 @@ func TestResolveNonExistentClusterReference(t *testing.T) {
 				},
 			},
 			"some-variable": {
-				Value: &justString,
+				Value: justString,
 			},
 		},
 	},
@@ -105,7 +105,7 @@ func TestNoLookupIfVariableIsSet(t *testing.T) {

 	diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
 	require.NoError(t, diags.Error())
-	require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value)
+	require.Equal(t, "random value", b.Config.Variables["my-cluster-id"].Value)
 }

 func TestResolveServicePrincipal(t *testing.T) {
@@ -132,14 +132,11 @@ func TestResolveServicePrincipal(t *testing.T) {

 	diags := bundle.Apply(context.Background(), b, ResolveResourceReferences())
 	require.NoError(t, diags.Error())
-	require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value)
+	require.Equal(t, "app-1234", b.Config.Variables["my-sp"].Value)
 }

 func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
-	s := func(s string) *string {
-		return &s
-	}
-
+	s := "bar"
 	b := &bundle.Bundle{
 		Config: config.Root{
 			Bundle: config.Bundle{
@@ -147,7 +144,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
 			},
 			Variables: map[string]*variable.Variable{
 				"foo": {
-					Value: s("bar"),
+					Value: s,
 				},
 				"lookup": {
 					Lookup: &variable.Lookup{
@@ -168,7 +165,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
 	diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
 	require.NoError(t, diags.Error())
 	require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster)
-	require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value)
+	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
 }

 func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
@@ -197,22 +194,15 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
 }

 func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {
-	s := func(s string) *string {
-		return &s
-	}
-
 	b := &bundle.Bundle{
 		Config: config.Root{
 			Bundle: config.Bundle{
 				Target: "dev",
 			},
 			Variables: map[string]*variable.Variable{
-				"foo": {
-					Value: s("bar"),
-				},
 				"lookup": {
 					Lookup: &variable.Lookup{
-						Cluster: "cluster-${var.foo}-${bundle.target}",
+						Cluster: "cluster-${bundle.target}",
 					},
 				},
 			},
@@ -227,5 +217,5 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {

 	diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
 	require.NoError(t, diags.Error())
-	require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value)
+	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
 }
@@ -17,6 +17,7 @@ type resolveVariableReferences struct {
 	prefixes []string
 	pattern  dyn.Pattern
 	lookupFn func(dyn.Value, dyn.Path) (dyn.Value, error)
+	skipFn   func(dyn.Value) bool
 }

 func ResolveVariableReferences(prefixes ...string) bundle.Mutator {
@@ -31,6 +32,18 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
 	}, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables}
 }

+func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
+	return &resolveVariableReferences{prefixes: []string{
+		"bundle",
+		"workspace",
+		"variables",
+	},
+		pattern:  dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")),
+		lookupFn: lookupForComplexVariables,
+		skipFn:   skipResolvingInNonComplexVariables,
+	}
+}
+
 func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
 	// Future opportunity: if we lookup this path in both the given root
 	// and the synthesized root, we know if it was explicitly set or implied to be empty.
@@ -38,6 +51,34 @@ func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
 	return dyn.GetByPath(v, path)
 }

+func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
+	if path[0].Key() != "variables" {
+		return lookup(v, path)
+	}
+
+	varV, err := dyn.GetByPath(v, path[:len(path)-1])
+	if err != nil {
+		return dyn.InvalidValue, err
+	}
+
+	var vv variable.Variable
+	err = convert.ToTyped(&vv, varV)
+	if err != nil {
+		return dyn.InvalidValue, err
+	}
+
+	if vv.Type == variable.VariableTypeComplex {
+		return dyn.InvalidValue, fmt.Errorf("complex variables cannot contain references to another complex variables")
+	}
+
+	return lookup(v, path)
+}
+
+func skipResolvingInNonComplexVariables(v dyn.Value) bool {
+	_, ok := v.AsMap()
+	return !ok
+}
+
 func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
 	if path[0].Key() != "variables" {
 		return lookup(v, path)
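The skipFn hook added above lets one resolver implementation serve both passes: the complex-variable pass declines to resolve references found inside non-map values, and lookupForComplexVariables rejects references from one complex variable into another. A hedged toy sketch of a predicate-gated resolver (self-contained; not the real dynvar API):

package main

import "fmt"

// resolver resolves values unless an optional predicate says to skip them.
type resolver struct {
	skipFn func(v any) bool
}

func (r *resolver) resolve(v any) (any, bool) {
	// Skip resolution if there is a skip function and it returns true.
	if r.skipFn != nil && r.skipFn(v) {
		return nil, false
	}
	return fmt.Sprintf("resolved(%v)", v), true
}

func main() {
	// Only resolve inside map-shaped (complex) values, mirroring
	// skipResolvingInNonComplexVariables above.
	r := &resolver{skipFn: func(v any) bool {
		_, ok := v.(map[string]any)
		return !ok
	}}
	fmt.Println(r.resolve(map[string]any{"node_type_id": "x"})) // resolved
	fmt.Println(r.resolve("plain string"))                      // skipped
}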
@@ -100,17 +141,27 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
 		// Resolve variable references in all values.
 		return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
 			// Rewrite the shorthand path ${var.foo} into ${variables.foo.value}.
-			if path.HasPrefix(varPath) && len(path) == 2 {
-				path = dyn.NewPath(
+			if path.HasPrefix(varPath) {
+				newPath := dyn.NewPath(
 					dyn.Key("variables"),
 					path[1],
 					dyn.Key("value"),
 				)
+
+				if len(path) > 2 {
+					newPath = newPath.Append(path[2:]...)
+				}
+
+				path = newPath
 			}

 			// Perform resolution only if the path starts with one of the specified prefixes.
 			for _, prefix := range prefixes {
 				if path.HasPrefix(prefix) {
+					// Skip resolution if there is a skip function and it returns true.
+					if m.skipFn != nil && m.skipFn(v) {
+						return dyn.InvalidValue, dynvar.ErrSkipResolution
+					}
 					return m.lookupFn(normalized, path)
 				}
 			}
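To make the rewrite above concrete: with complex variables, ${var.cluster.node_type_id} must expand to variables.cluster.value.node_type_id rather than stopping at variables.cluster.value. A hedged sketch of the same idea over dotted strings (the real code operates on dyn.Path values; names here are illustrative):

package main

import (
	"fmt"
	"strings"
)

// rewriteShorthand expands the "var.<name>[.<field>...]" shorthand into the
// full "variables.<name>.value[.<field>...]" path, preserving any trailing
// field access into a complex variable.
func rewriteShorthand(path string) string {
	parts := strings.Split(path, ".")
	if len(parts) < 2 || parts[0] != "var" {
		return path
	}
	newPath := []string{"variables", parts[1], "value"}
	// Keep trailing components so ${var.cluster.node_type_id} resolves to a
	// field of the complex value rather than to the whole value.
	newPath = append(newPath, parts[2:]...)
	return strings.Join(newPath, ".")
}

func main() {
	fmt.Println(rewriteShorthand("var.foo"))                  // variables.foo.value
	fmt.Println(rewriteShorthand("var.cluster.node_type_id")) // variables.cluster.value.node_type_id
}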
@@ -43,10 +43,6 @@ func TestResolveVariableReferences(t *testing.T) {
 }

 func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
-	s := func(s string) *string {
-		return &s
-	}
-
 	b := &bundle.Bundle{
 		Config: config.Root{
 			Bundle: config.Bundle{
@@ -57,7 +53,7 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
 		},
 		Variables: map[string]*variable.Variable{
 			"foo": {
-				Value: s("bar"),
+				Value: "bar",
 			},
 		},
 	},
@@ -195,3 +191,182 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
 	assert.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MaxWorkers)
 	assert.Equal(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice)
 }
+
+func TestResolveComplexVariable(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Bundle: config.Bundle{
+				Name: "example",
+			},
+			Variables: map[string]*variable.Variable{
+				"cluster": {
+					Value: map[string]any{
+						"node_type_id": "Standard_DS3_v2",
+						"num_workers":  2,
+					},
+					Type: variable.VariableTypeComplex,
+				},
+			},
+
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"job1": {
+						JobSettings: &jobs.JobSettings{
+							JobClusters: []jobs.JobCluster{
+								{
+									NewCluster: compute.ClusterSpec{
+										NodeTypeId: "random",
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	ctx := context.Background()
+
+	// Assign the variables to the dynamic configuration.
+	diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+		err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
+			var p dyn.Path
+			var err error
+
+			p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]")
+			v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}"))
+			require.NoError(t, err)
+
+			return v, nil
+		})
+		return diag.FromErr(err)
+	})
+	require.NoError(t, diags.Error())
+
+	diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables"))
+	require.NoError(t, diags.Error())
+	require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId)
+	require.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NumWorkers)
+}
+
+func TestResolveComplexVariableReferencesToFields(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Bundle: config.Bundle{
+				Name: "example",
+			},
+			Variables: map[string]*variable.Variable{
+				"cluster": {
+					Value: map[string]any{
+						"node_type_id": "Standard_DS3_v2",
+						"num_workers":  2,
+					},
+					Type: variable.VariableTypeComplex,
+				},
+			},
+
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"job1": {
+						JobSettings: &jobs.JobSettings{
+							JobClusters: []jobs.JobCluster{
+								{
+									NewCluster: compute.ClusterSpec{
+										NodeTypeId: "random",
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	ctx := context.Background()
+
+	// Assign the variables to the dynamic configuration.
+	diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+		err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
+			var p dyn.Path
+			var err error
+
+			p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0].new_cluster")
+			v, err = dyn.SetByPath(v, p.Append(dyn.Key("node_type_id")), dyn.V("${var.cluster.node_type_id}"))
+			require.NoError(t, err)
+
+			return v, nil
+		})
+		return diag.FromErr(err)
+	})
+	require.NoError(t, diags.Error())
+
+	diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables"))
+	require.NoError(t, diags.Error())
+	require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId)
+}
+
+func TestResolveComplexVariableReferencesWithComplexVariablesError(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Bundle: config.Bundle{
+				Name: "example",
+			},
+			Variables: map[string]*variable.Variable{
+				"cluster": {
+					Value: map[string]any{
+						"node_type_id": "Standard_DS3_v2",
+						"num_workers":  2,
+						"spark_conf":   "${var.spark_conf}",
+					},
+					Type: variable.VariableTypeComplex,
+				},
+				"spark_conf": {
+					Value: map[string]any{
+						"spark.executor.memory": "4g",
+						"spark.executor.cores":  "2",
+					},
+					Type: variable.VariableTypeComplex,
+				},
+			},
+
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"job1": {
+						JobSettings: &jobs.JobSettings{
+							JobClusters: []jobs.JobCluster{
+								{
+									NewCluster: compute.ClusterSpec{
+										NodeTypeId: "random",
+									},
+								},
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	ctx := context.Background()
+
+	// Assign the variables to the dynamic configuration.
+	diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+		err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
+			var p dyn.Path
+			var err error
+
+			p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]")
+			v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}"))
+			require.NoError(t, err)
+
+			return v, nil
+		})
+		return diag.FromErr(err)
+	})
+	require.NoError(t, diags.Error())
+
+	diags = bundle.Apply(ctx, b, bundle.Seq(ResolveVariableReferencesInComplexVariables(), ResolveVariableReferences("bundle", "workspace", "variables")))
+	require.ErrorContains(t, diags.Error(), "complex variables cannot contain references to another complex variables")
+}
@@ -53,14 +53,20 @@ func (e errBothSpAndUserSpecified) Error() string {
 }

 func validateRunAs(b *bundle.Bundle) error {
-	runAs := b.Config.RunAs
-
-	// Error if neither service_principal_name nor user_name are specified
-	if runAs.ServicePrincipalName == "" && runAs.UserName == "" {
-		return fmt.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as"))
-	}
+	neitherSpecifiedErr := fmt.Errorf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s", b.Config.GetLocation("run_as"))
+	// Error if neither service_principal_name nor user_name are specified, but the
+	// run_as section is present.
+	if b.Config.Value().Get("run_as").Kind() == dyn.KindNil {
+		return neitherSpecifiedErr
+	}
+	// Error if one or both of service_principal_name and user_name are specified,
+	// but with empty values.
+	if b.Config.RunAs.ServicePrincipalName == "" && b.Config.RunAs.UserName == "" {
+		return neitherSpecifiedErr
+	}

 	// Error if both service_principal_name and user_name are specified
+	runAs := b.Config.RunAs
 	if runAs.UserName != "" && runAs.ServicePrincipalName != "" {
 		return errBothSpAndUserSpecified{
 			spName: runAs.ServicePrincipalName,
@@ -163,8 +169,7 @@ func setPipelineOwnersToRunAsIdentity(b *bundle.Bundle) {

 func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// Mutator is a no-op if run_as is not specified in the bundle
-	runAs := b.Config.RunAs
-	if runAs == nil {
+	if b.Config.Value().Get("run_as").Kind() == dyn.KindInvalid {
 		return nil
 	}

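The validation above now distinguishes three situations: no run_as section at all (the mutator no-ops), a section that is present but names no identity, and a section naming both identities. A hedged toy model of that control flow (the real code inspects the dynamic config's Kind; this sketch uses a boolean for presence):

package main

import "fmt"

// Toy model of the run_as states the new validation distinguishes.
type runAs struct {
	ServicePrincipalName string
	UserName             string
}

func validate(present bool, r runAs) error {
	if !present {
		return nil // the mutator is a no-op when run_as is absent entirely
	}
	if r.ServicePrincipalName == "" && r.UserName == "" {
		return fmt.Errorf("run_as section must specify exactly one identity")
	}
	if r.ServicePrincipalName != "" && r.UserName != "" {
		return fmt.Errorf("both service_principal_name and user_name specified")
	}
	return nil
}

func main() {
	fmt.Println(validate(false, runAs{}))                                        // <nil>
	fmt.Println(validate(true, runAs{}))                                         // neither specified
	fmt.Println(validate(true, runAs{UserName: "u", ServicePrincipalName: "s"})) // both specified
}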
@@ -30,6 +30,10 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di
 	// case: read and set variable value from process environment
 	envVarName := bundleVarPrefix + name
 	if val, ok := env.Lookup(ctx, envVarName); ok {
+		if v.IsComplex() {
+			return diag.Errorf(`setting via environment variables (%s) is not supported for complex variable %s`, envVarName, name)
+		}
+
 		err := v.Set(val)
 		if err != nil {
 			return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err)
@@ -45,9 +49,9 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di

 	// case: Set the variable to its default value
 	if v.HasDefault() {
-		err := v.Set(*v.Default)
+		err := v.Set(v.Default)
 		if err != nil {
-			return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err)
+			return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, v.Default, name, err)
 		}
 		return nil
 	}
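Taken together, the mutator above applies values in a fixed precedence: an already-assigned value wins, then a BUNDLE_VAR_<name> environment variable (now rejected for complex variables), then the default. A hedged sketch of that ordering (resolveValue is an illustrative helper, not the CLI's API):

package main

import (
	"fmt"
	"os"
)

// resolveValue mirrors the precedence: assigned value, then environment
// variable (disallowed for complex variables), then default, else error.
func resolveValue(name string, value, def any, isComplex bool) (any, error) {
	if value != nil {
		return value, nil
	}
	if env, ok := os.LookupEnv("BUNDLE_VAR_" + name); ok {
		if isComplex {
			return nil, fmt.Errorf("setting via environment variables (BUNDLE_VAR_%s) is not supported for complex variable %s", name, name)
		}
		return env, nil
	}
	if def != nil {
		return def, nil
	}
	return nil, fmt.Errorf("no value assigned to required variable %s", name)
}

func main() {
	os.Setenv("BUNDLE_VAR_foo", "from-env")
	fmt.Println(resolveValue("foo", nil, "default", false)) // from-env
	fmt.Println(resolveValue("foo", nil, nil, true))        // error: complex via env
}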
@@ -15,7 +15,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
 	defaultVal := "default"
 	variable := variable.Variable{
 		Description: "a test variable",
-		Default:     &defaultVal,
+		Default:     defaultVal,
 	}

 	// set value for variable as an environment variable
@@ -23,19 +23,19 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {

 	diags := setVariable(context.Background(), &variable, "foo")
 	require.NoError(t, diags.Error())
-	assert.Equal(t, *variable.Value, "process-env")
+	assert.Equal(t, variable.Value, "process-env")
 }

 func TestSetVariableUsingDefaultValue(t *testing.T) {
 	defaultVal := "default"
 	variable := variable.Variable{
 		Description: "a test variable",
-		Default:     &defaultVal,
+		Default:     defaultVal,
 	}

 	diags := setVariable(context.Background(), &variable, "foo")
 	require.NoError(t, diags.Error())
-	assert.Equal(t, *variable.Value, "default")
+	assert.Equal(t, variable.Value, "default")
 }

 func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
@@ -43,15 +43,15 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
 	val := "assigned-value"
 	variable := variable.Variable{
 		Description: "a test variable",
-		Default:     &defaultVal,
-		Value:       &val,
+		Default:     defaultVal,
+		Value:       val,
 	}

 	// since a value is already assigned to the variable, it would not be overridden
 	// by the default value
 	diags := setVariable(context.Background(), &variable, "foo")
 	require.NoError(t, diags.Error())
-	assert.Equal(t, *variable.Value, "assigned-value")
+	assert.Equal(t, variable.Value, "assigned-value")
 }

 func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
@@ -59,8 +59,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
 	val := "assigned-value"
 	variable := variable.Variable{
 		Description: "a test variable",
-		Default:     &defaultVal,
-		Value:       &val,
+		Default:     defaultVal,
+		Value:       val,
 	}

 	// set value for variable as an environment variable
@@ -70,7 +70,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
 	// by the value from environment
 	diags := setVariable(context.Background(), &variable, "foo")
 	require.NoError(t, diags.Error())
-	assert.Equal(t, *variable.Value, "assigned-value")
+	assert.Equal(t, variable.Value, "assigned-value")
 }

 func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
@@ -92,15 +92,15 @@ func TestSetVariablesMutator(t *testing.T) {
 		Variables: map[string]*variable.Variable{
 			"a": {
 				Description: "resolved to default value",
-				Default:     &defaultValForA,
+				Default:     defaultValForA,
 			},
 			"b": {
 				Description: "resolved from environment vairables",
-				Default:     &defaultValForB,
+				Default:     defaultValForB,
 			},
 			"c": {
 				Description: "has already been assigned a value",
-				Value:       &valForC,
+				Value:       valForC,
 			},
 		},
 	},
@@ -110,7 +110,22 @@ func TestSetVariablesMutator(t *testing.T) {

 	diags := bundle.Apply(context.Background(), b, SetVariables())
 	require.NoError(t, diags.Error())
-	assert.Equal(t, "default-a", *b.Config.Variables["a"].Value)
-	assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value)
-	assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value)
+	assert.Equal(t, "default-a", b.Config.Variables["a"].Value)
+	assert.Equal(t, "env-var-b", b.Config.Variables["b"].Value)
+	assert.Equal(t, "assigned-val-c", b.Config.Variables["c"].Value)
 }
+
+func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) {
+	defaultVal := "default"
+	variable := variable.Variable{
+		Description: "a test variable",
+		Default:     defaultVal,
+		Type:        variable.VariableTypeComplex,
+	}
+
+	// set value for variable as an environment variable
+	t.Setenv("BUNDLE_VAR_foo", "process-env")
+
+	diags := setVariable(context.Background(), &variable, "foo")
+	assert.ErrorContains(t, diags.Error(), "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo")
+}
@@ -33,9 +33,7 @@ func (err ErrIsNotNotebook) Error() string {
 	return fmt.Sprintf("file at %s is not a notebook", err.path)
 }

-type translatePaths struct {
-	seen map[string]string
-}
+type translatePaths struct{}

 // TranslatePaths converts paths to local notebook files into paths in the workspace file system.
 func TranslatePaths() bundle.Mutator {
@@ -48,6 +46,18 @@ func (m *translatePaths) Name() string {

 type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error)

+// translateContext is a context for rewriting paths in a config.
+// It is freshly instantiated on every mutator apply call.
+// It provides access to the underlying bundle object such that
+// it doesn't have to be passed around explicitly.
+type translateContext struct {
+	b *bundle.Bundle
+
+	// seen is a map of local paths to their corresponding remote paths.
+	// If a local path has already been successfully resolved, we do not need to resolve it again.
+	seen map[string]string
+}
+
 // rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function
 //
 // It takes these arguments:
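The refactoring pattern here, continued through the rest of this file, is to keep the long-lived mutator stateless and hang all per-run state (the bundle pointer and the seen cache) off a context built inside Apply. A minimal sketch of the shape (toy types; the real translation logic is elided):

package main

import "fmt"

// mutator stays stateless; each Apply call builds a fresh context holding
// the per-run cache, so repeated or concurrent applies cannot share state.
type mutator struct{}

type applyContext struct {
	seen map[string]string // per-run memoization of rewritten paths
}

func (m *mutator) Apply(paths []string) []string {
	ctx := &applyContext{seen: make(map[string]string)}
	out := make([]string, 0, len(paths))
	for _, p := range paths {
		out = append(out, ctx.rewrite(p))
	}
	return out
}

func (c *applyContext) rewrite(p string) string {
	if cached, ok := c.seen[p]; ok {
		return cached
	}
	r := "/Workspace/files/" + p // stand-in for the real translation
	c.seen[p] = r
	return r
}

func main() {
	m := &mutator{}
	fmt.Println(m.Apply([]string{"a.py", "a.py", "b.py"}))
}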
@@ -57,14 +67,13 @@ type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (
 // This logic is different between regular files or notebooks.
 //
 // The function returns an error if it is impossible to rewrite the given relative path.
-func (m *translatePaths) rewritePath(
+func (t *translateContext) rewritePath(
 	dir string,
-	b *bundle.Bundle,
 	p *string,
 	fn rewriteFunc,
 ) error {
 	// We assume absolute paths point to a location in the workspace
-	if path.IsAbs(filepath.ToSlash(*p)) {
+	if path.IsAbs(*p) {
 		return nil
 	}

@@ -80,13 +89,14 @@ func (m *translatePaths) rewritePath(

 	// Local path is relative to the directory the resource was defined in.
 	localPath := filepath.Join(dir, filepath.FromSlash(*p))
-	if interp, ok := m.seen[localPath]; ok {
+	if interp, ok := t.seen[localPath]; ok {
 		*p = interp
 		return nil
 	}

-	// Remote path must be relative to the bundle root.
-	localRelPath, err := filepath.Rel(b.RootPath, localPath)
+	// Local path must be contained in the bundle root.
+	// If it isn't, it won't be synchronized into the workspace.
+	localRelPath, err := filepath.Rel(t.b.RootPath, localPath)
 	if err != nil {
 		return err
 	}
@@ -95,20 +105,20 @@ func (m *translatePaths) rewritePath(
 	}

 	// Prefix remote path with its remote root path.
-	remotePath := path.Join(b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath))
+	remotePath := path.Join(t.b.Config.Workspace.FilePath, filepath.ToSlash(localRelPath))

 	// Convert local path into workspace path via specified function.
-	interp, err := fn(*p, localPath, localRelPath, filepath.ToSlash(remotePath))
+	interp, err := fn(*p, localPath, localRelPath, remotePath)
 	if err != nil {
 		return err
 	}

 	*p = interp
-	m.seen[localPath] = interp
+	t.seen[localPath] = interp
 	return nil
 }

-func translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+func (t *translateContext) translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
 	nb, _, err := notebook.Detect(localFullPath)
 	if errors.Is(err, fs.ErrNotExist) {
 		return "", fmt.Errorf("notebook %s not found", literal)
@@ -124,7 +134,7 @@ func translateNotebookPath(literal, localFullPath, localRelPath, remotePath stri
 	return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil
 }

-func translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+func (t *translateContext) translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
 	nb, _, err := notebook.Detect(localFullPath)
 	if errors.Is(err, fs.ErrNotExist) {
 		return "", fmt.Errorf("file %s not found", literal)
@@ -138,7 +148,7 @@ func translateFilePath(literal, localFullPath, localRelPath, remotePath string)
 	return remotePath, nil
 }

-func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+func (t *translateContext) translateDirectoryPath(literal, localFullPath, localRelPath, remotePath string) (string, error) {
 	info, err := os.Stat(localFullPath)
 	if err != nil {
 		return "", err
@@ -149,20 +159,20 @@ func translateDirectoryPath(literal, localFullPath, localRelPath, remotePath str
 	return remotePath, nil
 }

-func translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+func (t *translateContext) translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) {
 	return localRelPath, nil
 }

-func translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) {
+func (t *translateContext) translateNoOpWithPrefix(literal, localFullPath, localRelPath, remotePath string) (string, error) {
 	if !strings.HasPrefix(localRelPath, ".") {
 		localRelPath = "." + string(filepath.Separator) + localRelPath
 	}
 	return localRelPath, nil
 }

-func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) {
+func (t *translateContext) rewriteValue(p dyn.Path, v dyn.Value, fn rewriteFunc, dir string) (dyn.Value, error) {
 	out := v.MustString()
-	err := m.rewritePath(dir, b, &out, fn)
+	err := t.rewritePath(dir, &out, fn)
 	if err != nil {
 		if target := (&ErrIsNotebook{}); errors.As(err, target) {
 			return dyn.InvalidValue, fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, p, target)
@@ -176,15 +186,15 @@ func (m *translatePaths) rewriteValue(b *bundle.Bundle, p dyn.Path, v dyn.Value,
 	return dyn.NewValue(out, v.Location()), nil
 }

-func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) {
-	nv, err := m.rewriteValue(b, p, v, fn, dir)
+func (t *translateContext) rewriteRelativeTo(p dyn.Path, v dyn.Value, fn rewriteFunc, dir, fallback string) (dyn.Value, error) {
+	nv, err := t.rewriteValue(p, v, fn, dir)
 	if err == nil {
 		return nv, nil
 	}

 	// If we failed to rewrite the path, try to rewrite it relative to the fallback directory.
 	if fallback != "" {
-		nv, nerr := m.rewriteValue(b, p, v, fn, fallback)
+		nv, nerr := t.rewriteValue(p, v, fn, fallback)
 		if nerr == nil {
 			// TODO: Emit a warning that this path should be rewritten.
 			return nv, nil
@@ -195,16 +205,19 @@ func (m *translatePaths) rewriteRelativeTo(b *bundle.Bundle, p dyn.Path, v dyn.V
 }

 func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
-	m.seen = make(map[string]string)
+	t := &translateContext{
+		b:    b,
+		seen: make(map[string]string),
+	}

 	err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
 		var err error
-		for _, fn := range []func(*bundle.Bundle, dyn.Value) (dyn.Value, error){
-			m.applyJobTranslations,
-			m.applyPipelineTranslations,
-			m.applyArtifactTranslations,
+		for _, fn := range []func(dyn.Value) (dyn.Value, error){
+			t.applyJobTranslations,
+			t.applyPipelineTranslations,
+			t.applyArtifactTranslations,
 		} {
-			v, err = fn(b, v)
+			v, err = fn(v)
 			if err != nil {
 				return dyn.InvalidValue, err
 			}
@@ -3,36 +3,42 @@ package mutator
 import (
 	"fmt"

-	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/libs/dyn"
 )

-func (m *translatePaths) applyArtifactTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) {
-	var err error
+type artifactRewritePattern struct {
+	pattern dyn.Pattern
+	fn      rewriteFunc
+}

+func (t *translateContext) artifactRewritePatterns() []artifactRewritePattern {
 	// Base pattern to match all artifacts.
 	base := dyn.NewPattern(
 		dyn.Key("artifacts"),
 		dyn.AnyKey(),
 	)

-	for _, t := range []struct {
-		pattern dyn.Pattern
-		fn      rewriteFunc
-	}{
+	// Compile list of configuration paths to rewrite.
+	return []artifactRewritePattern{
 		{
 			base.Append(dyn.Key("path")),
-			translateNoOp,
+			t.translateNoOp,
 		},
-	} {
-		v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+	}
+}
+
+func (t *translateContext) applyArtifactTranslations(v dyn.Value) (dyn.Value, error) {
+	var err error
+
+	for _, rewritePattern := range t.artifactRewritePatterns() {
+		v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
 			key := p[1].Key()
 			dir, err := v.Location().Directory()
 			if err != nil {
 				return dyn.InvalidValue, fmt.Errorf("unable to determine directory for artifact %s: %w", key, err)
 			}

-			return m.rewriteRelativeTo(b, p, v, t.fn, dir, "")
+			return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, "")
 		})
 		if err != nil {
 			return dyn.InvalidValue, err
@@ -4,7 +4,6 @@ import (
 	"fmt"
 	"slices"

-	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/libraries"
 	"github.com/databricks/cli/libs/dyn"
 )
@@ -19,55 +18,42 @@ func noSkipRewrite(string) bool {
 	return false
 }

-func rewritePatterns(base dyn.Pattern) []jobRewritePattern {
+func rewritePatterns(t *translateContext, base dyn.Pattern) []jobRewritePattern {
 	return []jobRewritePattern{
 		{
 			base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")),
-			translateNotebookPath,
+			t.translateNotebookPath,
 			noSkipRewrite,
 		},
 		{
 			base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")),
-			translateFilePath,
+			t.translateFilePath,
 			noSkipRewrite,
 		},
 		{
 			base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")),
-			translateDirectoryPath,
+			t.translateDirectoryPath,
 			noSkipRewrite,
 		},
 		{
 			base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")),
-			translateFilePath,
+			t.translateFilePath,
 			noSkipRewrite,
 		},
 		{
 			base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")),
-			translateNoOp,
+			t.translateNoOp,
 			noSkipRewrite,
 		},
 		{
 			base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")),
-			translateNoOp,
+			t.translateNoOp,
 			noSkipRewrite,
 		},
 	}
 }

-func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) {
-	fallback, err := gatherFallbackPaths(v, "jobs")
-	if err != nil {
-		return dyn.InvalidValue, err
-	}
-
-	// Do not translate job task paths if using Git source
-	var ignore []string
-	for key, job := range b.Config.Resources.Jobs {
-		if job.GitSource != nil {
-			ignore = append(ignore, key)
-		}
-	}
-
+func (t *translateContext) jobRewritePatterns() []jobRewritePattern {
 	// Base pattern to match all tasks in all jobs.
 	base := dyn.NewPattern(
 		dyn.Key("resources"),
@@ -90,19 +76,38 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy
 				dyn.Key("dependencies"),
 				dyn.AnyIndex(),
 			),
-			translateNoOpWithPrefix,
+			t.translateNoOpWithPrefix,
 			func(s string) bool {
 				return !libraries.IsEnvironmentDependencyLocal(s)
 			},
 		},
 	}
-	taskPatterns := rewritePatterns(base)
-	forEachPatterns := rewritePatterns(base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
+
+	taskPatterns := rewritePatterns(t, base)
+	forEachPatterns := rewritePatterns(t, base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
 	allPatterns := append(taskPatterns, jobEnvironmentsPatterns...)
 	allPatterns = append(allPatterns, forEachPatterns...)
+	return allPatterns
+}

-	for _, t := range allPatterns {
-		v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+func (t *translateContext) applyJobTranslations(v dyn.Value) (dyn.Value, error) {
+	var err error
+
+	fallback, err := gatherFallbackPaths(v, "jobs")
+	if err != nil {
+		return dyn.InvalidValue, err
+	}
+
+	// Do not translate job task paths if using Git source
+	var ignore []string
+	for key, job := range t.b.Config.Resources.Jobs {
+		if job.GitSource != nil {
+			ignore = append(ignore, key)
+		}
+	}
+
+	for _, rewritePattern := range t.jobRewritePatterns() {
+		v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
 			key := p[2].Key()

 			// Skip path translation if the job is using git source.
@@ -116,10 +121,10 @@ func (m *translatePaths) applyJobTranslations(b *bundle.Bundle, v dyn.Value) (dy
 			}

 			sv := v.MustString()
-			if t.skipRewrite(sv) {
+			if rewritePattern.skipRewrite(sv) {
 				return v, nil
 			}
-			return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key])
+			return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key])
 		})
 		if err != nil {
 			return dyn.InvalidValue, err
@@ -3,16 +3,15 @@ package mutator
 import (
 	"fmt"

-	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/libs/dyn"
 )

-func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value) (dyn.Value, error) {
-	fallback, err := gatherFallbackPaths(v, "pipelines")
-	if err != nil {
-		return dyn.InvalidValue, err
+type pipelineRewritePattern struct {
+	pattern dyn.Pattern
+	fn      rewriteFunc
 }

+func (t *translateContext) pipelineRewritePatterns() []pipelineRewritePattern {
 	// Base pattern to match all libraries in all pipelines.
 	base := dyn.NewPattern(
 		dyn.Key("resources"),
@@ -22,27 +21,36 @@ func (m *translatePaths) applyPipelineTranslations(b *bundle.Bundle, v dyn.Value
 		dyn.AnyIndex(),
 	)

-	for _, t := range []struct {
-		pattern dyn.Pattern
-		fn      rewriteFunc
-	}{
+	// Compile list of configuration paths to rewrite.
+	return []pipelineRewritePattern{
 		{
 			base.Append(dyn.Key("notebook"), dyn.Key("path")),
-			translateNotebookPath,
+			t.translateNotebookPath,
 		},
 		{
 			base.Append(dyn.Key("file"), dyn.Key("path")),
-			translateFilePath,
+			t.translateFilePath,
 		},
-	} {
-		v, err = dyn.MapByPattern(v, t.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+	}
+}
+
+func (t *translateContext) applyPipelineTranslations(v dyn.Value) (dyn.Value, error) {
+	var err error
+
+	fallback, err := gatherFallbackPaths(v, "pipelines")
+	if err != nil {
+		return dyn.InvalidValue, err
+	}
+
+	for _, rewritePattern := range t.pipelineRewritePatterns() {
+		v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
 			key := p[2].Key()
 			dir, err := v.Location().Directory()
 			if err != nil {
 				return dyn.InvalidValue, fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err)
 			}

-			return m.rewriteRelativeTo(b, p, v, t.fn, dir, fallback[key])
+			return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key])
 		})
 		if err != nil {
 			return dyn.InvalidValue, err
@@ -267,6 +267,11 @@ func (r *Root) InitializeVariables(vars []string) error {
 		if _, ok := r.Variables[name]; !ok {
 			return fmt.Errorf("variable %s has not been defined", name)
 		}
+
+		if r.Variables[name].IsComplex() {
+			return fmt.Errorf("setting variables of complex type via --var flag is not supported: %s", name)
+		}
+
 		err := r.Variables[name].Set(val)
 		if err != nil {
 			return fmt.Errorf("failed to assign %s to %s: %s", val, name, err)
@@ -341,7 +346,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// Merge `run_as`. This field must be overwritten if set, not merged.
-	if v := target.Get("run_as"); v != dyn.InvalidValue {
+	if v := target.Get("run_as"); v.Kind() != dyn.KindInvalid {
 		root, err = dyn.Set(root, "run_as", v)
 		if err != nil {
 			return err
@@ -349,7 +354,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// Below, we're setting fields on the bundle key, so make sure it exists.
-	if root.Get("bundle") == dyn.InvalidValue {
+	if root.Get("bundle").Kind() == dyn.KindInvalid {
 		root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}))
 		if err != nil {
 			return err
@@ -357,7 +362,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// Merge `mode`. This field must be overwritten if set, not merged.
-	if v := target.Get("mode"); v != dyn.InvalidValue {
+	if v := target.Get("mode"); v.Kind() != dyn.KindInvalid {
 		root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v)
 		if err != nil {
 			return err
@@ -365,7 +370,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// Merge `compute_id`. This field must be overwritten if set, not merged.
-	if v := target.Get("compute_id"); v != dyn.InvalidValue {
+	if v := target.Get("compute_id"); v.Kind() != dyn.KindInvalid {
 		root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v)
 		if err != nil {
 			return err
@@ -373,7 +378,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// Merge `git`.
-	if v := target.Get("git"); v != dyn.InvalidValue {
+	if v := target.Get("git"); v.Kind() != dyn.KindInvalid {
 		ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git")))
 		if err != nil {
 			ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})
@@ -386,7 +391,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	}

 	// If the branch was overridden, we need to clear the inferred flag.
-	if branch := v.Get("branch"); branch != dyn.InvalidValue {
+	if branch := v.Get("branch"); branch.Kind() != dyn.KindInvalid {
 		out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{}))
 		if err != nil {
 			return err
@@ -414,12 +419,12 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
 	// For each target, rewrite the variables block.
 	return dyn.Map(v, "targets", dyn.Foreach(func(_ dyn.Path, target dyn.Value) (dyn.Value, error) {
 		// Confirm it has a variables block.
-		if target.Get("variables") == dyn.InvalidValue {
+		if target.Get("variables").Kind() == dyn.KindInvalid {
 			return target, nil
 		}

 		// For each variable, normalize its contents if it is a single string.
-		return dyn.Map(target, "variables", dyn.Foreach(func(_ dyn.Path, variable dyn.Value) (dyn.Value, error) {
+		return dyn.Map(target, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
 			switch variable.Kind() {

 			case dyn.KindString, dyn.KindBool, dyn.KindFloat, dyn.KindInt:
@@ -430,6 +435,21 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
 					"default": variable,
 				}, variable.Location()), nil

+			case dyn.KindMap, dyn.KindSequence:
+				// Check if the original definition of variable has a type field.
+				typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type")))
+				if err != nil {
+					return variable, nil
+				}
+
+				if typeV.MustString() == "complex" {
+					return dyn.NewValue(map[string]dyn.Value{
+						"default": variable,
+					}, variable.Location()), nil
+				}
+
+				return variable, nil
+
 			default:
 				return variable, nil
 			}
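The new case above means a map or sequence written directly under a variable name in a target override is treated as shorthand for its default, but only when the variable's original definition declares type: complex. A toy sketch of that normalization over plain Go values (the real code walks dyn.Value trees):

package main

import "fmt"

// normalize wraps a shorthand override in {"default": ...} so downstream
// code always sees the long form; maps and sequences are wrapped only when
// the variable's original definition declares the "complex" type.
func normalize(override any, declaredType string) any {
	switch override.(type) {
	case string, bool, int, float64:
		return map[string]any{"default": override}
	case map[string]any, []any:
		if declaredType == "complex" {
			return map[string]any{"default": override}
		}
	}
	return override
}

func main() {
	fmt.Println(normalize("bar", ""))                                   // wrapped
	fmt.Println(normalize(map[string]any{"num_workers": 2}, "complex")) // wrapped
	fmt.Println(normalize(map[string]any{"default": "x"}, ""))          // left as-is
}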
@@ -444,7 +464,7 @@ func validateVariableOverrides(root, target dyn.Value) (err error) {
 	var tv map[string]variable.Variable

 	// Collect variables from the root.
-	if v := root.Get("variables"); v != dyn.InvalidValue {
+	if v := root.Get("variables"); v.Kind() != dyn.KindInvalid {
 		err = convert.ToTyped(&rv, v)
 		if err != nil {
 			return fmt.Errorf("unable to collect variables from root: %w", err)
@@ -452,7 +472,7 @@ func validateVariableOverrides(root, target dyn.Value) (err error) {
 	}

 	// Collect variables from the target.
-	if v := target.Get("variables"); v != dyn.InvalidValue {
+	if v := target.Get("variables"); v.Kind() != dyn.KindInvalid {
 		err = convert.ToTyped(&tv, v)
 		if err != nil {
 			return fmt.Errorf("unable to collect variables from target: %w", err)
@@ -51,7 +51,7 @@ func TestInitializeVariables(t *testing.T) {
 	root := &Root{
 		Variables: map[string]*variable.Variable{
 			"foo": {
-				Default:     &fooDefault,
+				Default:     fooDefault,
 				Description: "an optional variable since default is defined",
 			},
 			"bar": {
@@ -62,8 +62,8 @@ func TestInitializeVariables(t *testing.T) {

 	err := root.InitializeVariables([]string{"foo=123", "bar=456"})
 	assert.NoError(t, err)
-	assert.Equal(t, "123", *(root.Variables["foo"].Value))
-	assert.Equal(t, "456", *(root.Variables["bar"].Value))
+	assert.Equal(t, "123", (root.Variables["foo"].Value))
+	assert.Equal(t, "456", (root.Variables["bar"].Value))
 }

 func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) {
@@ -77,7 +77,7 @@ func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) {

 	err := root.InitializeVariables([]string{"foo=123=567"})
 	assert.NoError(t, err)
-	assert.Equal(t, "123=567", *(root.Variables["foo"].Value))
+	assert.Equal(t, "123=567", (root.Variables["foo"].Value))
 }

 func TestInitializeVariablesInvalidFormat(t *testing.T) {
@@ -119,3 +119,16 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) {
 	require.NoError(t, root.MergeTargetOverrides("development"))
 	assert.Equal(t, Development, root.Bundle.Mode)
 }
+
+func TestInitializeComplexVariablesViaFlagIsNotAllowed(t *testing.T) {
+	root := &Root{
+		Variables: map[string]*variable.Variable{
+			"foo": {
+				Type: variable.VariableTypeComplex,
+			},
+		},
+	}
+
+	err := root.InitializeVariables([]string{"foo=123"})
+	assert.ErrorContains(t, err, "setting variables of complex type via --var flag is not supported: foo")
+}
@@ -2,12 +2,27 @@ package variable

 import (
 	"fmt"
+	"reflect"
 )

+// We are using `any` because since introduction of complex variables,
+// variables can be of any type.
+// Type alias is used to make it easier to understand the code.
+type VariableValue = any
+
+type VariableType string
+
+const (
+	VariableTypeComplex VariableType = "complex"
+)
+
 // An input variable for the bundle config
 type Variable struct {
+	// A type of the variable. This is used to validate the value of the variable
+	Type VariableType `json:"type,omitempty"`
+
 	// A default value which then makes the variable optional
-	Default *string `json:"default,omitempty"`
+	Default VariableValue `json:"default,omitempty"`

 	// Documentation for this input variable
 	Description string `json:"description,omitempty"`
@@ -21,7 +36,7 @@ type Variable struct {
 	// 4. Default value defined in variable definition
 	// 5. Throw error, since if no default value is defined, then the variable
 	//    is required
-	Value *string `json:"value,omitempty" bundle:"readonly"`
+	Value VariableValue `json:"value,omitempty" bundle:"readonly"`

 	// The value of this field will be used to lookup the resource by name
 	// And assign the value of the variable to ID of the resource found.
@@ -39,10 +54,24 @@ func (v *Variable) HasValue() bool {
 	return v.Value != nil
 }

-func (v *Variable) Set(val string) error {
+func (v *Variable) Set(val VariableValue) error {
 	if v.HasValue() {
-		return fmt.Errorf("variable has already been assigned value: %s", *v.Value)
+		return fmt.Errorf("variable has already been assigned value: %s", v.Value)
 	}
-	v.Value = &val

+	rv := reflect.ValueOf(val)
+	switch rv.Kind() {
+	case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map:
+		if v.Type != VariableTypeComplex {
+			return fmt.Errorf("variable type is not complex")
+		}
+	}
+
+	v.Value = val
+
 	return nil
 }
+
+func (v *Variable) IsComplex() bool {
+	return v.Type == VariableTypeComplex
+}
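With Default and Value widened from *string to any (via the VariableValue alias), Set gates structured values behind the declared type using reflection. A self-contained sketch of the widened type (toy struct; the real one carries more fields):

package main

import (
	"fmt"
	"reflect"
)

// Variable is a toy version of the widened type: values are `any`, and Set
// rejects structured values unless the variable is declared complex.
type Variable struct {
	Type  string // "" or "complex"
	Value any
}

func (v *Variable) Set(val any) error {
	if v.Value != nil {
		return fmt.Errorf("variable has already been assigned value: %v", v.Value)
	}
	switch reflect.ValueOf(val).Kind() {
	case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map:
		if v.Type != "complex" {
			return fmt.Errorf("variable type is not complex")
		}
	}
	v.Value = val
	return nil
}

func main() {
	var plain, complexVar Variable
	complexVar.Type = "complex"
	fmt.Println(plain.Set(map[string]any{"k": "v"}))      // error: not complex
	fmt.Println(complexVar.Set(map[string]any{"k": "v"})) // <nil>
}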
@@ -1,3 +1,3 @@
 package schema

-const ProviderVersion = "1.47.0"
+const ProviderVersion = "1.48.0"
@@ -28,6 +28,7 @@ type Config struct {
 	Profile             string `json:"profile,omitempty"`
 	RateLimit           int    `json:"rate_limit,omitempty"`
 	RetryTimeoutSeconds int    `json:"retry_timeout_seconds,omitempty"`
+	ServerlessComputeId string `json:"serverless_compute_id,omitempty"`
 	SkipVerify          bool   `json:"skip_verify,omitempty"`
 	Token               string `json:"token,omitempty"`
 	Username            string `json:"username,omitempty"`
@@ -19,6 +19,7 @@ type DataSourceExternalLocationExternalLocationInfo struct {
 	CreatedBy      string `json:"created_by,omitempty"`
 	CredentialId   string `json:"credential_id,omitempty"`
 	CredentialName string `json:"credential_name,omitempty"`
+	IsolationMode  string `json:"isolation_mode,omitempty"`
 	MetastoreId    string `json:"metastore_id,omitempty"`
 	Name           string `json:"name,omitempty"`
 	Owner          string `json:"owner,omitempty"`
@ -26,6 +26,7 @@ type DataSourceJobJobSettingsSettingsEmailNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -500,6 +501,7 @@ type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -529,6 +531,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskEmailNotifications struc
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -824,6 +827,10 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSt
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -832,6 +839,7 @@ type DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotifications str
|
|||
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []DataSourceJobJobSettingsSettingsTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -1163,6 +1171,10 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart struct {
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -1171,6 +1183,7 @@ type DataSourceJobJobSettingsSettingsTaskWebhookNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []DataSourceJobJobSettingsSettingsTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -1236,6 +1249,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart struct {
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -1244,6 +1261,7 @@ type DataSourceJobJobSettingsSettingsWebhookNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
|
|
@ -36,6 +36,7 @@ type DataSourceStorageCredentialStorageCredentialInfo struct {
|
|||
CreatedAt int `json:"created_at,omitempty"`
|
||||
CreatedBy string `json:"created_by,omitempty"`
|
||||
Id string `json:"id,omitempty"`
|
||||
IsolationMode string `json:"isolation_mode,omitempty"`
|
||||
MetastoreId string `json:"metastore_id,omitempty"`
|
||||
Name string `json:"name,omitempty"`
|
||||
Owner string `json:"owner,omitempty"`
|
||||
|
|
|
@ -26,6 +26,7 @@ type ResourceJobEmailNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -573,6 +574,7 @@ type ResourceJobTaskEmailNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -602,6 +604,7 @@ type ResourceJobTaskForEachTaskTaskEmailNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []string `json:"on_failure,omitempty"`
|
||||
OnStart []string `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []string `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []string `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -943,6 +946,10 @@ type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart struct {
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -951,6 +958,7 @@ type ResourceJobTaskForEachTaskTaskWebhookNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []ResourceJobTaskForEachTaskTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -1329,6 +1337,10 @@ type ResourceJobTaskWebhookNotificationsOnStart struct {
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobTaskWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -1337,6 +1349,7 @@ type ResourceJobTaskWebhookNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []ResourceJobTaskWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []ResourceJobTaskWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []ResourceJobTaskWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []ResourceJobTaskWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []ResourceJobTaskWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
@ -1378,6 +1391,11 @@ type ResourceJobTriggerFileArrival struct {
|
|||
WaitAfterLastChangeSeconds int `json:"wait_after_last_change_seconds,omitempty"`
|
||||
}
|
||||
|
||||
type ResourceJobTriggerPeriodic struct {
|
||||
Interval int `json:"interval"`
|
||||
Unit string `json:"unit"`
|
||||
}
|
||||
|
||||
type ResourceJobTriggerTable struct {
|
||||
Condition string `json:"condition,omitempty"`
|
||||
MinTimeBetweenTriggersSeconds int `json:"min_time_between_triggers_seconds,omitempty"`
|
||||
|
@ -1395,6 +1413,7 @@ type ResourceJobTriggerTableUpdate struct {
|
|||
type ResourceJobTrigger struct {
|
||||
PauseStatus string `json:"pause_status,omitempty"`
|
||||
FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"`
|
||||
Periodic *ResourceJobTriggerPeriodic `json:"periodic,omitempty"`
|
||||
Table *ResourceJobTriggerTable `json:"table,omitempty"`
|
||||
TableUpdate *ResourceJobTriggerTableUpdate `json:"table_update,omitempty"`
|
||||
}
|
||||
|
@ -1411,6 +1430,10 @@ type ResourceJobWebhookNotificationsOnStart struct {
|
|||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobWebhookNotificationsOnStreamingBacklogExceeded struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
||||
type ResourceJobWebhookNotificationsOnSuccess struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
|
@ -1419,6 +1442,7 @@ type ResourceJobWebhookNotifications struct {
|
|||
OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"`
|
||||
OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"`
|
||||
OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"`
|
||||
OnStreamingBacklogExceeded []ResourceJobWebhookNotificationsOnStreamingBacklogExceeded `json:"on_streaming_backlog_exceeded,omitempty"`
|
||||
OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"`
|
||||
}
|
||||
|
||||
|
|
|
@ -22,5 +22,6 @@ type ResourceOnlineTable struct {
|
|||
Id string `json:"id,omitempty"`
|
||||
Name string `json:"name"`
|
||||
Status []any `json:"status,omitempty"`
|
||||
TableServingUrl string `json:"table_serving_url,omitempty"`
|
||||
Spec *ResourceOnlineTableSpec `json:"spec,omitempty"`
|
||||
}
|
||||
|
|
|
@@ -21,7 +21,7 @@ type Root struct {

const ProviderHost = "registry.terraform.io"
const ProviderSource = "databricks/databricks"
const ProviderVersion = "1.47.0"
const ProviderVersion = "1.48.0"

func NewRoot() *Root {
	return &Root{

@@ -29,11 +29,13 @@ func Initialize() bundle.Mutator {
		mutator.ExpandWorkspaceRoot(),
		mutator.DefineDefaultWorkspacePaths(),
		mutator.SetVariables(),
		// Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences
		// and ResolveVariableReferences. See what is expected in PythonMutatorPhaseInit doc
		// Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences,
		// ResolveVariableReferencesInComplexVariables and ResolveVariableReferences.
		// See what is expected in PythonMutatorPhaseInit doc
		pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit),
		mutator.ResolveVariableReferencesInLookup(),
		mutator.ResolveResourceReferences(),
		mutator.ResolveVariableReferencesInComplexVariables(),
		mutator.ResolveVariableReferences(
			"bundle",
			"workspace",

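The ordering matters because a complex variable's own fields may contain `${var...}` references. A rough sketch of the two-pass idea follows; the maps and string-replacement loop are a simplified stand-in for the real mutators, not the actual implementation:

package main

import (
	"fmt"
	"strings"
)

func main() {
	scalars := map[string]string{"node_type": "Standard_DS3_v2"}

	// Complex variable as declared in YAML: its fields still contain references.
	cluster := map[string]string{
		"spark_version": "13.2.x-scala2.11",
		"node_type_id":  "${var.node_type}",
	}

	// Pass 1 (ResolveVariableReferencesInComplexVariables): resolve the
	// references *inside* the complex value.
	for k, v := range cluster {
		for name, val := range scalars {
			v = strings.ReplaceAll(v, "${var."+name+"}", val)
		}
		cluster[k] = v
	}

	// Pass 2 (ResolveVariableReferences): the fully resolved complex value can
	// now be copied into resources wholesale, e.g. new_cluster: ${var.cluster}.
	fmt.Println(cluster) // map[node_type_id:Standard_DS3_v2 spark_version:13.2.x-scala2.11]
}
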
@@ -79,6 +79,17 @@
"experimental": {
  "description": "",
  "properties": {
    "pydabs": {
      "description": "",
      "properties": {
        "enabled": {
          "description": ""
        },
        "venv_path": {
          "description": ""
        }
      }
    },
    "python_wheel_wrapper": {
      "description": ""
    },

@@ -236,6 +247,12 @@
          "description": ""
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
        "items": {
          "description": ""
        }
      },
      "on_success": {
        "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
        "items": {

@@ -853,6 +870,12 @@
          "description": ""
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
        "items": {
          "description": ""
        }
      },
      "on_success": {
        "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
        "items": {

@@ -1595,6 +1618,17 @@
          }
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
        "items": {
          "description": "",
          "properties": {
            "id": {
              "description": ""
            }
          }
        }
      },
      "on_success": {
        "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
        "items": {

@@ -1634,6 +1668,17 @@
      "pause_status": {
        "description": "Whether this trigger is paused or not."
      },
      "periodic": {
        "description": "Periodic trigger settings.",
        "properties": {
          "interval": {
            "description": "The interval at which the trigger should run."
          },
          "unit": {
            "description": "The unit of time for the interval."
          }
        }
      },
      "table": {
        "description": "Old table trigger settings name. Deprecated in favor of `table_update`.",
        "properties": {

@@ -1712,6 +1757,17 @@
          }
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
        "items": {
          "description": "",
          "properties": {
            "id": {
              "description": ""
            }
          }
        }
      },
      "on_success": {
        "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
        "items": {

@@ -1740,16 +1796,16 @@
      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
      "properties": {
        "catalog_name": {
          "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set."
          "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled."
        },
        "enabled": {
          "description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again."
          "description": "Indicates whether the inference table is enabled."
        },
        "schema_name": {
          "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set."
          "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled."
        },
        "table_name_prefix": {
          "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set."
          "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled."
        }
      }
    },

@@ -2623,7 +2679,7 @@
        }
      },
      "notebook": {
        "description": "The path to a notebook that defines a pipeline and is stored in the \u003cDatabricks\u003e workspace.\n",
        "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n",
        "properties": {
          "path": {
            "description": "The absolute path of the notebook."

@@ -3167,6 +3223,12 @@
          "description": ""
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
        "items": {
          "description": ""
        }
      },
      "on_success": {
        "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
        "items": {

@@ -3784,6 +3846,12 @@
          "description": ""
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "A list of email addresses to notify when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.",
        "items": {
          "description": ""
        }
      },
      "on_success": {
        "description": "A list of email addresses to be notified when a run successfully completes. A run is considered to have completed successfully if it ends with a `TERMINATED` `life_cycle_state` and a `SUCCESS` result_state. If not specified on job creation, reset, or update, the list is empty, and notifications are not sent.",
        "items": {

@@ -4526,6 +4594,17 @@
          }
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
        "items": {
          "description": "",
          "properties": {
            "id": {
              "description": ""
            }
          }
        }
      },
      "on_success": {
        "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
        "items": {

@@ -4565,6 +4644,17 @@
      "pause_status": {
        "description": "Whether this trigger is paused or not."
      },
      "periodic": {
        "description": "Periodic trigger settings.",
        "properties": {
          "interval": {
            "description": "The interval at which the trigger should run."
          },
          "unit": {
            "description": "The unit of time for the interval."
          }
        }
      },
      "table": {
        "description": "Old table trigger settings name. Deprecated in favor of `table_update`.",
        "properties": {

@@ -4643,6 +4733,17 @@
          }
        }
      },
      "on_streaming_backlog_exceeded": {
        "description": "An optional list of system notification IDs to call when any streaming backlog thresholds are exceeded for any stream.\nStreaming backlog thresholds can be set in the `health` field using the following metrics: `STREAMING_BACKLOG_BYTES`, `STREAMING_BACKLOG_RECORDS`, `STREAMING_BACKLOG_SECONDS`, or `STREAMING_BACKLOG_FILES`.\nAlerting is based on the 10-minute average of these metrics. If the issue persists, notifications are resent every 30 minutes.\nA maximum of 3 destinations can be specified for the `on_streaming_backlog_exceeded` property.",
        "items": {
          "description": "",
          "properties": {
            "id": {
              "description": ""
            }
          }
        }
      },
      "on_success": {
        "description": "An optional list of system notification IDs to call when the run completes successfully. A maximum of 3 destinations can be specified for the `on_success` property.",
        "items": {

@@ -4671,16 +4772,16 @@
      "description": "Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.",
      "properties": {
        "catalog_name": {
          "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if it was already set."
          "description": "The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if the inference table is already enabled."
        },
        "enabled": {
          "description": "If inference tables are enabled or not. NOTE: If you have already disabled payload logging once, you cannot enable again."
          "description": "Indicates whether the inference table is enabled."
        },
        "schema_name": {
          "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if it was already set."
          "description": "The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if the inference table is already enabled."
        },
        "table_name_prefix": {
          "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if it was already set."
          "description": "The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if the inference table is already enabled."
        }
      }
    },

@@ -5554,7 +5655,7 @@
        }
      },
      "notebook": {
        "description": "The path to a notebook that defines a pipeline and is stored in the \u003cDatabricks\u003e workspace.\n",
        "description": "The path to a notebook that defines a pipeline and is stored in the Databricks workspace.\n",
        "properties": {
          "path": {
            "description": "The absolute path of the notebook."

@@ -20,7 +20,7 @@ func TestIntSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}`

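The only change in these fixtures is the interpolation pattern: it now also accepts index access such as `[0]`, both between and after path segments. A quick, self-contained check of that behavior (patterns copied from the expectations above, unescaped from their JSON form; the sample references are illustrative):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	oldPat := regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`)
	newPat := regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)

	for _, ref := range []string{
		"${var.cluster.spark_version}", // plain reference: both patterns accept it
		"${var.libraries[0].jar}",      // indexed reference: only the new pattern accepts it
	} {
		fmt.Printf("%-30s old=%v new=%v\n", ref, oldPat.MatchString(ref), newPat.MatchString(ref))
	}
}

The same replacement is applied to every schema test below; the remaining hunks differ only in which test and which nesting level the pattern appears at.
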
@@ -47,7 +47,7 @@ func TestBooleanSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}`

@@ -123,7 +123,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -134,7 +134,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -145,7 +145,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -156,7 +156,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -167,7 +167,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -178,7 +178,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -189,7 +189,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -200,7 +200,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -214,7 +214,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -225,7 +225,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -236,7 +236,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -247,7 +247,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -258,7 +258,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -326,7 +326,7 @@ func TestStructOfStructsSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -391,7 +391,7 @@ func TestStructOfMapsSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -481,7 +481,7 @@ func TestMapOfPrimitivesSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -518,7 +518,7 @@ func TestMapOfStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -556,7 +556,7 @@ func TestMapOfMapSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -661,7 +661,7 @@ func TestSliceOfMapSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -699,7 +699,7 @@ func TestSliceOfStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -757,7 +757,7 @@ func TestEmbeddedStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -797,7 +797,7 @@ func TestEmbeddedStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -892,7 +892,7 @@ func TestNonAnnotatedFieldsAreSkipped(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -934,7 +934,7 @@ func TestDashFieldsAreSkipped(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -987,7 +987,7 @@ func TestPointerInStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1004,7 +1004,7 @@ func TestPointerInStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1018,7 +1018,7 @@ func TestPointerInStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1035,7 +1035,7 @@ func TestPointerInStructSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1106,7 +1106,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1129,7 +1129,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1157,7 +1157,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1180,7 +1180,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1210,7 +1210,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1236,7 +1236,7 @@ func TestGenericSchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1322,7 +1322,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1333,7 +1333,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1347,7 +1347,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1429,7 +1429,7 @@ func TestDocIngestionForObject(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1512,7 +1512,7 @@ func TestDocIngestionForSlice(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1524,7 +1524,7 @@ func TestDocIngestionForSlice(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1611,7 +1611,7 @@ func TestDocIngestionForMap(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1623,7 +1623,7 @@ func TestDocIngestionForMap(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1683,7 +1683,7 @@ func TestDocIngestionForTopLevelPrimitive(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	}

@@ -1761,7 +1761,7 @@ func TestInterfaceGeneratesEmptySchema(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1810,7 +1810,7 @@ func TestBundleReadOnlytag(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -1870,7 +1870,7 @@ func TestBundleInternalTag(t *testing.T) {
			},
			{
				"type": "string",
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}"
				"pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}"
			}
		]
	},

@@ -0,0 +1,62 @@
package config_tests

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/stretchr/testify/require"
)

func TestComplexVariables(t *testing.T) {
	b, diags := loadTargetWithDiags("variables/complex", "default")
	require.Empty(t, diags)

	diags = bundle.Apply(context.Background(), b, bundle.Seq(
		mutator.SetVariables(),
		mutator.ResolveVariableReferencesInComplexVariables(),
		mutator.ResolveVariableReferences(
			"variables",
		),
	))
	require.NoError(t, diags.Error())

	require.Equal(t, "13.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
	require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
	require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
	require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])

	require.Equal(t, 3, len(b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries))
	require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
		Jar: "/path/to/jar",
	})
	require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
		Egg: "/path/to/egg",
	})
	require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{
		Whl: "/path/to/whl",
	})

	require.Equal(t, "task with spark version 13.2.x-scala2.11 and jar /path/to/jar", b.Config.Resources.Jobs["my_job"].Tasks[0].TaskKey)
}

func TestComplexVariablesOverride(t *testing.T) {
	b, diags := loadTargetWithDiags("variables/complex", "dev")
	require.Empty(t, diags)

	diags = bundle.Apply(context.Background(), b, bundle.Seq(
		mutator.SetVariables(),
		mutator.ResolveVariableReferencesInComplexVariables(),
		mutator.ResolveVariableReferences(
			"variables",
		),
	))
	require.NoError(t, diags.Error())

	require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
	require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
	require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
	require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
}

@@ -0,0 +1,5 @@
bundle:
  name: "abc"

run_as:
  service_principal_name: ""

@@ -0,0 +1,5 @@
bundle:
  name: "abc"

run_as:
  user_name: ""

@@ -0,0 +1,6 @@
bundle:
  name: "abc"

run_as:
  service_principal_name: ""
  user_name: ""

@@ -196,7 +196,33 @@ func TestRunAsErrorWhenBothUserAndSpSpecified(t *testing.T) {
}

func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) {
	b := load(t, "./run_as/not_allowed/neither_sp_nor_user")
	tcases := []struct {
		name string
		err  string
	}{
		{
			name: "empty_run_as",
			err:  fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:8", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_run_as/databricks.yml")),
		},
		{
			name: "empty_sp",
			err:  fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_sp/databricks.yml")),
		},
		{
			name: "empty_user",
			err:  fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_user/databricks.yml")),
		},
		{
			name: "empty_user_and_sp",
			err:  fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:5:3", filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/empty_user_and_sp/databricks.yml")),
		},
	}

	for _, tc := range tcases {
		t.Run(tc.name, func(t *testing.T) {

			bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name)
			b := load(t, bundlePath)

			ctx := context.Background()
			bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {

@@ -210,13 +236,13 @@ func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) {

	diags := bundle.Apply(ctx, b, mutator.SetRunAs())
	err := diags.Error()

	configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/databricks.yml")
	assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:8", configPath))
	assert.EqualError(t, err, tc.err)
		})
	}
}

func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) {
	b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user_override", "development")
	b := loadTarget(t, "./run_as/not_allowed/neither_sp_nor_user/override", "development")

	ctx := context.Background()
	bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {

@@ -231,7 +257,7 @@ func TestRunAsErrorNeitherUserOrSpSpecifiedAtTargetOverride(t *testing.T) {
	diags := bundle.Apply(ctx, b, mutator.SetRunAs())
	err := diags.Error()

	configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user_override/override.yml")
	configPath := filepath.FromSlash("run_as/not_allowed/neither_sp_nor_user/override/override.yml")
	assert.EqualError(t, err, fmt.Sprintf("run_as section must specify exactly one identity. Neither service_principal_name nor user_name is specified at %s:4:12", configPath))
}

@@ -0,0 +1,49 @@
bundle:
  name: complex-variables

resources:
  jobs:
    my_job:
      job_clusters:
        - job_cluster_key: key
          new_cluster: ${var.cluster}
      tasks:
      - task_key: test
        job_cluster_key: key
        libraries: ${variables.libraries.value}
        task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"

variables:
  node_type:
    default: "Standard_DS3_v2"
  cluster:
    type: complex
    description: "A cluster definition"
    default:
      spark_version: "13.2.x-scala2.11"
      node_type_id: ${var.node_type}
      num_workers: 2
      spark_conf:
        spark.speculation: true
        spark.databricks.delta.retentionDurationCheck.enabled: false
  libraries:
    type: complex
    description: "A libraries definition"
    default:
      - jar: "/path/to/jar"
      - egg: "/path/to/egg"
      - whl: "/path/to/whl"


targets:
  default:
  dev:
    variables:
      node_type: "Standard_DS3_v3"
      cluster:
        spark_version: "14.2.x-scala2.11"
        node_type_id: ${var.node_type}
        num_workers: 4
        spark_conf:
          spark.speculation: false
          spark.databricks.delta.retentionDurationCheck.enabled: false

@@ -109,8 +109,8 @@ func TestVariablesWithoutDefinition(t *testing.T) {
	require.NoError(t, diags.Error())
	require.True(t, b.Config.Variables["a"].HasValue())
	require.True(t, b.Config.Variables["b"].HasValue())
	assert.Equal(t, "foo", *b.Config.Variables["a"].Value)
	assert.Equal(t, "bar", *b.Config.Variables["b"].Value)
	assert.Equal(t, "foo", b.Config.Variables["a"].Value)
	assert.Equal(t, "bar", b.Config.Variables["b"].Value)
}

func TestVariablesWithTargetLookupOverrides(t *testing.T) {

@@ -140,9 +140,9 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) {
	))

	require.NoError(t, diags.Error())
	assert.Equal(t, "4321", *b.Config.Variables["d"].Value)
	assert.Equal(t, "1234", *b.Config.Variables["e"].Value)
	assert.Equal(t, "9876", *b.Config.Variables["f"].Value)
	assert.Equal(t, "4321", b.Config.Variables["d"].Value)
	assert.Equal(t, "1234", b.Config.Variables["e"].Value)
	assert.Equal(t, "9876", b.Config.Variables["f"].Value)
}

func TestVariableTargetOverrides(t *testing.T) {

@@ -24,7 +24,12 @@ func New() *cobra.Command {
  Databricks SQL object that periodically runs a query, evaluates a condition of
  its result, and notifies one or more users and/or notification destinations if
  the condition was met. Alerts can be scheduled using the sql_task type of
  the Jobs API, e.g. :method:jobs/create.`,
  the Jobs API, e.g. :method:jobs/create.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
		GroupID: "sql",
		Annotations: map[string]string{
			"package": "sql",

@@ -73,7 +78,12 @@ func newCreate() *cobra.Command {

  Creates an alert. An alert is a Databricks SQL object that periodically runs a
  query, evaluates a condition of its result, and notifies users or notification
  destinations if the condition was met.`
  destinations if the condition was met.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -131,8 +141,13 @@ func newDelete() *cobra.Command {
	cmd.Long = `Delete an alert.

  Deletes an alert. Deleted alerts are no longer accessible and cannot be
  restored. **Note:** Unlike queries and dashboards, alerts cannot be moved to
  the trash.`
  restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to
  the trash.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -199,7 +214,12 @@ func newGet() *cobra.Command {
	cmd.Short = `Get an alert.`
	cmd.Long = `Get an alert.

  Gets an alert.`
  Gets an alert.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -261,7 +281,12 @@ func newList() *cobra.Command {
	cmd.Short = `Get alerts.`
	cmd.Long = `Get alerts.

  Gets a list of alerts.`
  Gets a list of alerts.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -312,7 +337,12 @@ func newUpdate() *cobra.Command {
	cmd.Short = `Update an alert.`
	cmd.Long = `Update an alert.

  Updates an alert.`
  Updates an alert.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@ -42,6 +42,7 @@ func New() *cobra.Command {
|
|||
cmd.AddCommand(newGetEnvironment())
|
||||
cmd.AddCommand(newList())
|
||||
cmd.AddCommand(newListDeployments())
|
||||
cmd.AddCommand(newStart())
|
||||
cmd.AddCommand(newStop())
|
||||
cmd.AddCommand(newUpdate())
|
||||
|
||||
|
@@ -615,6 +616,64 @@ func newListDeployments() *cobra.Command {
	return cmd
}

// start start command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var startOverrides []func(
	*cobra.Command,
	*serving.StartAppRequest,
)

func newStart() *cobra.Command {
	cmd := &cobra.Command{}

	var startReq serving.StartAppRequest

	// TODO: short flags

	cmd.Use = "start NAME"
	cmd.Short = `Start an app.`
	cmd.Long = `Start an app.

  Start the last active deployment of the app in the workspace.

  Arguments:
    NAME: The name of the app.`

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(1)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		startReq.Name = args[0]

		response, err := w.Apps.Start(ctx, startReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range startOverrides {
		fn(cmd, &startReq)
	}

	return cmd
}
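The `startOverrides` slice above is the extension point the generated code keeps referring to. A minimal sketch of how a manually curated `override.go` could hook into it (the package name `apps` and the example string are illustrative assumptions, not part of this commit):

// override.go — hypothetical, manually curated companion file (illustration only).
package apps

import (
	"github.com/databricks/databricks-sdk-go/service/serving"
	"github.com/spf13/cobra"
)

func init() {
	// Overrides run after the generated command is constructed, so they can
	// adjust help text, flags, or completions without touching generated code.
	startOverrides = append(startOverrides, func(cmd *cobra.Command, req *serving.StartAppRequest) {
		cmd.Example = "  databricks apps start my-app"
	})
}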

// start stop command

// Slice with functions to override default command behavior.

@@ -273,6 +273,8 @@ func newList() *cobra.Command {
	// TODO: short flags

	cmd.Flags().BoolVar(&listReq.IncludeBrowse, "include-browse", listReq.IncludeBrowse, `Whether to include catalogs in the response for which the principal can only access selective metadata for.`)
	cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of catalogs to return.`)
	cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`)

	cmd.Use = "list"
	cmd.Short = `List catalogs.`

@@ -268,8 +268,8 @@ func newList() *cobra.Command {

  Fetch a paginated list of dashboard objects.

  ### **Warning: Calling this API concurrently 10 or more times could result in
  throttling, service degradation, or a temporary ban.**`
  **Warning**: Calling this API concurrently 10 or more times could result in
  throttling, service degradation, or a temporary ban.`

	cmd.Annotations = make(map[string]string)

@@ -25,7 +25,12 @@ func New() *cobra.Command {
  This API does not support searches. It returns the full list of SQL warehouses
  in your workspace. We advise you to use any text editor, REST client, or
  grep to search the response from this API for the name of your SQL warehouse
  as it appears in Databricks SQL.`,
  as it appears in Databricks SQL.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
		GroupID: "sql",
		Annotations: map[string]string{
			"package": "sql",

@@ -60,7 +65,12 @@ func newList() *cobra.Command {

  Retrieves a full list of SQL warehouses available in this workspace. All
  fields that appear in this API response are enumerated for clarity. However,
  you need only a SQL warehouse's id to create new queries against it.`
  you need only a SQL warehouse's id to create new queries against it.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -348,6 +348,7 @@ func newUpdate() *cobra.Command {
	cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`)
	// TODO: complex arg: encryption_details
	cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`)
	cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`)
	cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the external location.`)
	cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`)
	cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`)

@@ -69,6 +69,8 @@ func newCreate() *cobra.Command {
	cmd.Short = `Create a function.`
	cmd.Long = `Create a function.

  **WARNING: This API is experimental and will change in future versions**

  Creates a new function

  The user must have the following permissions in order for the function to be

@@ -1502,24 +1502,15 @@ func newSubmit() *cobra.Command {
	cmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`)

	// TODO: array: access_control_list
	// TODO: complex arg: condition_task
	// TODO: complex arg: dbt_task
	// TODO: complex arg: email_notifications
	// TODO: array: environments
	// TODO: complex arg: git_source
	// TODO: complex arg: health
	cmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`)
	// TODO: complex arg: notebook_task
	// TODO: complex arg: notification_settings
	// TODO: complex arg: pipeline_task
	// TODO: complex arg: python_wheel_task
	// TODO: complex arg: queue
	// TODO: complex arg: run_as
	// TODO: complex arg: run_job_task
	cmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`)
	// TODO: complex arg: spark_jar_task
	// TODO: complex arg: spark_python_task
	// TODO: complex arg: spark_submit_task
	// TODO: complex arg: sql_task
	// TODO: array: tasks
	cmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`)
	// TODO: complex arg: webhook_notifications

@@ -31,13 +31,23 @@ func New() *cobra.Command {

	// Add methods
	cmd.AddCommand(newCreate())
	cmd.AddCommand(newCreateSchedule())
	cmd.AddCommand(newCreateSubscription())
	cmd.AddCommand(newDeleteSchedule())
	cmd.AddCommand(newDeleteSubscription())
	cmd.AddCommand(newGet())
	cmd.AddCommand(newGetPublished())
	cmd.AddCommand(newGetSchedule())
	cmd.AddCommand(newGetSubscription())
	cmd.AddCommand(newList())
	cmd.AddCommand(newListSchedules())
	cmd.AddCommand(newListSubscriptions())
	cmd.AddCommand(newMigrate())
	cmd.AddCommand(newPublish())
	cmd.AddCommand(newTrash())
	cmd.AddCommand(newUnpublish())
	cmd.AddCommand(newUpdate())
	cmd.AddCommand(newUpdateSchedule())

	// Apply optional overrides to this command.
	for _, fn := range cmdOverrides {

@@ -126,6 +136,277 @@ func newCreate() *cobra.Command {
	return cmd
}

// start create-schedule command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createScheduleOverrides []func(
	*cobra.Command,
	*dashboards.CreateScheduleRequest,
)

func newCreateSchedule() *cobra.Command {
	cmd := &cobra.Command{}

	var createScheduleReq dashboards.CreateScheduleRequest
	var createScheduleJson flags.JsonFlag

	// TODO: short flags
	cmd.Flags().Var(&createScheduleJson, "json", `either inline JSON string or @path/to/file.json with request body`)

	cmd.Flags().StringVar(&createScheduleReq.DisplayName, "display-name", createScheduleReq.DisplayName, `The display name for schedule.`)
	cmd.Flags().Var(&createScheduleReq.PauseStatus, "pause-status", `The status indicates whether this schedule is paused or not. Supported values: [PAUSED, UNPAUSED]`)

	cmd.Use = "create-schedule DASHBOARD_ID"
	cmd.Short = `Create dashboard schedule.`
	cmd.Long = `Create dashboard schedule.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(1)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		if cmd.Flags().Changed("json") {
			err = createScheduleJson.Unmarshal(&createScheduleReq)
			if err != nil {
				return err
			}
		} else {
			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
		}
		createScheduleReq.DashboardId = args[0]

		response, err := w.Lakeview.CreateSchedule(ctx, createScheduleReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range createScheduleOverrides {
		fn(cmd, &createScheduleReq)
	}

	return cmd
}
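For comparison, the same create call can be made directly against the Go SDK that this CLI wraps. The sketch below uses only the request fields visible in this diff (`DashboardId`, `DisplayName`, `PauseStatus`) and is an illustration under those assumptions, not part of the change:

// Hypothetical sketch: calling the Lakeview schedule API via the Go SDK.
package main

import (
	"context"
	"fmt"

	"github.com/databricks/databricks-sdk-go"
	"github.com/databricks/databricks-sdk-go/service/dashboards"
)

func main() {
	ctx := context.Background()
	w := databricks.Must(databricks.NewWorkspaceClient())

	// Roughly equivalent to: databricks lakeview create-schedule <dashboard-id> --json ...
	schedule, err := w.Lakeview.CreateSchedule(ctx, dashboards.CreateScheduleRequest{
		DashboardId: "00000000-0000-0000-0000-000000000000", // placeholder UUID
		DisplayName: "Nightly refresh",
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(schedule)
}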

// start create-subscription command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var createSubscriptionOverrides []func(
	*cobra.Command,
	*dashboards.CreateSubscriptionRequest,
)

func newCreateSubscription() *cobra.Command {
	cmd := &cobra.Command{}

	var createSubscriptionReq dashboards.CreateSubscriptionRequest
	var createSubscriptionJson flags.JsonFlag

	// TODO: short flags
	cmd.Flags().Var(&createSubscriptionJson, "json", `either inline JSON string or @path/to/file.json with request body`)

	cmd.Use = "create-subscription DASHBOARD_ID SCHEDULE_ID"
	cmd.Short = `Create schedule subscription.`
	cmd.Long = `Create schedule subscription.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs.
    SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(2)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		if cmd.Flags().Changed("json") {
			err = createSubscriptionJson.Unmarshal(&createSubscriptionReq)
			if err != nil {
				return err
			}
		} else {
			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
		}
		createSubscriptionReq.DashboardId = args[0]
		createSubscriptionReq.ScheduleId = args[1]

		response, err := w.Lakeview.CreateSubscription(ctx, createSubscriptionReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range createSubscriptionOverrides {
		fn(cmd, &createSubscriptionReq)
	}

	return cmd
}

// start delete-schedule command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteScheduleOverrides []func(
	*cobra.Command,
	*dashboards.DeleteScheduleRequest,
)

func newDeleteSchedule() *cobra.Command {
	cmd := &cobra.Command{}

	var deleteScheduleReq dashboards.DeleteScheduleRequest

	// TODO: short flags

	cmd.Flags().StringVar(&deleteScheduleReq.Etag, "etag", deleteScheduleReq.Etag, `The etag for the schedule.`)

	cmd.Use = "delete-schedule DASHBOARD_ID SCHEDULE_ID"
	cmd.Short = `Delete dashboard schedule.`
	cmd.Long = `Delete dashboard schedule.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.
    SCHEDULE_ID: UUID identifying the schedule.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(2)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		deleteScheduleReq.DashboardId = args[0]
		deleteScheduleReq.ScheduleId = args[1]

		err = w.Lakeview.DeleteSchedule(ctx, deleteScheduleReq)
		if err != nil {
			return err
		}
		return nil
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range deleteScheduleOverrides {
		fn(cmd, &deleteScheduleReq)
	}

	return cmd
}

// start delete-subscription command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteSubscriptionOverrides []func(
	*cobra.Command,
	*dashboards.DeleteSubscriptionRequest,
)

func newDeleteSubscription() *cobra.Command {
	cmd := &cobra.Command{}

	var deleteSubscriptionReq dashboards.DeleteSubscriptionRequest

	// TODO: short flags

	cmd.Flags().StringVar(&deleteSubscriptionReq.Etag, "etag", deleteSubscriptionReq.Etag, `The etag for the subscription.`)

	cmd.Use = "delete-subscription DASHBOARD_ID SCHEDULE_ID SUBSCRIPTION_ID"
	cmd.Short = `Delete schedule subscription.`
	cmd.Long = `Delete schedule subscription.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard which the subscription belongs.
    SCHEDULE_ID: UUID identifying the schedule which the subscription belongs.
    SUBSCRIPTION_ID: UUID identifying the subscription.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(3)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		deleteSubscriptionReq.DashboardId = args[0]
		deleteSubscriptionReq.ScheduleId = args[1]
		deleteSubscriptionReq.SubscriptionId = args[2]

		err = w.Lakeview.DeleteSubscription(ctx, deleteSubscriptionReq)
		if err != nil {
			return err
		}
		return nil
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range deleteSubscriptionOverrides {
		fn(cmd, &deleteSubscriptionReq)
	}

	return cmd
}

// start get command

// Slice with functions to override default command behavior.

@@ -242,6 +523,303 @@ func newGetPublished() *cobra.Command {
	return cmd
}

// start get-schedule command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getScheduleOverrides []func(
	*cobra.Command,
	*dashboards.GetScheduleRequest,
)

func newGetSchedule() *cobra.Command {
	cmd := &cobra.Command{}

	var getScheduleReq dashboards.GetScheduleRequest

	// TODO: short flags

	cmd.Use = "get-schedule DASHBOARD_ID SCHEDULE_ID"
	cmd.Short = `Get dashboard schedule.`
	cmd.Long = `Get dashboard schedule.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.
    SCHEDULE_ID: UUID identifying the schedule.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(2)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		getScheduleReq.DashboardId = args[0]
		getScheduleReq.ScheduleId = args[1]

		response, err := w.Lakeview.GetSchedule(ctx, getScheduleReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range getScheduleOverrides {
		fn(cmd, &getScheduleReq)
	}

	return cmd
}

// start get-subscription command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var getSubscriptionOverrides []func(
	*cobra.Command,
	*dashboards.GetSubscriptionRequest,
)

func newGetSubscription() *cobra.Command {
	cmd := &cobra.Command{}

	var getSubscriptionReq dashboards.GetSubscriptionRequest

	// TODO: short flags

	cmd.Use = "get-subscription DASHBOARD_ID SCHEDULE_ID SUBSCRIPTION_ID"
	cmd.Short = `Get schedule subscription.`
	cmd.Long = `Get schedule subscription.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard which the subscription belongs.
    SCHEDULE_ID: UUID identifying the schedule which the subscription belongs.
    SUBSCRIPTION_ID: UUID identifying the subscription.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(3)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		getSubscriptionReq.DashboardId = args[0]
		getSubscriptionReq.ScheduleId = args[1]
		getSubscriptionReq.SubscriptionId = args[2]

		response, err := w.Lakeview.GetSubscription(ctx, getSubscriptionReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range getSubscriptionOverrides {
		fn(cmd, &getSubscriptionReq)
	}

	return cmd
}

// start list command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listOverrides []func(
	*cobra.Command,
	*dashboards.ListDashboardsRequest,
)

func newList() *cobra.Command {
	cmd := &cobra.Command{}

	var listReq dashboards.ListDashboardsRequest

	// TODO: short flags

	cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The number of dashboards to return per page.`)
	cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A page token, received from a previous ListDashboards call.`)
	cmd.Flags().BoolVar(&listReq.ShowTrashed, "show-trashed", listReq.ShowTrashed, `The flag to include dashboards located in the trash.`)
	cmd.Flags().Var(&listReq.View, "view", `Indicates whether to include all metadata from the dashboard in the response. Supported values: [DASHBOARD_VIEW_BASIC, DASHBOARD_VIEW_FULL]`)

	cmd.Use = "list"
	cmd.Short = `List dashboards.`
	cmd.Long = `List dashboards.`

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(0)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		response := w.Lakeview.List(ctx, listReq)
		return cmdio.RenderIterator(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range listOverrides {
		fn(cmd, &listReq)
	}

	return cmd
}
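Note the difference from the render-once commands above: `List` returns a paginated iterator, which `cmdio.RenderIterator` drains lazily. A rough sketch of walking the same iterator by hand, assuming the `HasNext`/`Next` iterator interface from databricks-sdk-go's listing support and a `DisplayName` field on the returned dashboards (both assumptions, not shown in this diff):

// Hypothetical sketch of draining the paginated listing manually
// (w and ctx as in the RunE body above).
it := w.Lakeview.List(ctx, dashboards.ListDashboardsRequest{PageSize: 100})
for it.HasNext(ctx) {
	dashboard, err := it.Next(ctx) // fetches the next page transparently when needed
	if err != nil {
		return err
	}
	fmt.Println(dashboard.DisplayName)
}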

// start list-schedules command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listSchedulesOverrides []func(
	*cobra.Command,
	*dashboards.ListSchedulesRequest,
)

func newListSchedules() *cobra.Command {
	cmd := &cobra.Command{}

	var listSchedulesReq dashboards.ListSchedulesRequest

	// TODO: short flags

	cmd.Flags().IntVar(&listSchedulesReq.PageSize, "page-size", listSchedulesReq.PageSize, `The number of schedules to return per page.`)
	cmd.Flags().StringVar(&listSchedulesReq.PageToken, "page-token", listSchedulesReq.PageToken, `A page token, received from a previous ListSchedules call.`)

	cmd.Use = "list-schedules DASHBOARD_ID"
	cmd.Short = `List dashboard schedules.`
	cmd.Long = `List dashboard schedules.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(1)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		listSchedulesReq.DashboardId = args[0]

		response := w.Lakeview.ListSchedules(ctx, listSchedulesReq)
		return cmdio.RenderIterator(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range listSchedulesOverrides {
		fn(cmd, &listSchedulesReq)
	}

	return cmd
}

// start list-subscriptions command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var listSubscriptionsOverrides []func(
	*cobra.Command,
	*dashboards.ListSubscriptionsRequest,
)

func newListSubscriptions() *cobra.Command {
	cmd := &cobra.Command{}

	var listSubscriptionsReq dashboards.ListSubscriptionsRequest

	// TODO: short flags

	cmd.Flags().IntVar(&listSubscriptionsReq.PageSize, "page-size", listSubscriptionsReq.PageSize, `The number of subscriptions to return per page.`)
	cmd.Flags().StringVar(&listSubscriptionsReq.PageToken, "page-token", listSubscriptionsReq.PageToken, `A page token, received from a previous ListSubscriptions call.`)

	cmd.Use = "list-subscriptions DASHBOARD_ID SCHEDULE_ID"
	cmd.Short = `List schedule subscriptions.`
	cmd.Long = `List schedule subscriptions.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs.
    SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(2)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		listSubscriptionsReq.DashboardId = args[0]
		listSubscriptionsReq.ScheduleId = args[1]

		response := w.Lakeview.ListSubscriptions(ctx, listSubscriptionsReq)
		return cmdio.RenderIterator(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range listSubscriptionsOverrides {
		fn(cmd, &listSubscriptionsReq)
	}

	return cmd
}

// start migrate command

// Slice with functions to override default command behavior.

@@ -576,4 +1154,79 @@ func newUpdate() *cobra.Command {
	return cmd
}

// start update-schedule command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateScheduleOverrides []func(
	*cobra.Command,
	*dashboards.UpdateScheduleRequest,
)

func newUpdateSchedule() *cobra.Command {
	cmd := &cobra.Command{}

	var updateScheduleReq dashboards.UpdateScheduleRequest
	var updateScheduleJson flags.JsonFlag

	// TODO: short flags
	cmd.Flags().Var(&updateScheduleJson, "json", `either inline JSON string or @path/to/file.json with request body`)

	cmd.Flags().StringVar(&updateScheduleReq.DisplayName, "display-name", updateScheduleReq.DisplayName, `The display name for schedule.`)
	cmd.Flags().StringVar(&updateScheduleReq.Etag, "etag", updateScheduleReq.Etag, `The etag for the schedule.`)
	cmd.Flags().Var(&updateScheduleReq.PauseStatus, "pause-status", `The status indicates whether this schedule is paused or not. Supported values: [PAUSED, UNPAUSED]`)

	cmd.Use = "update-schedule DASHBOARD_ID SCHEDULE_ID"
	cmd.Short = `Update dashboard schedule.`
	cmd.Long = `Update dashboard schedule.

  Arguments:
    DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.
    SCHEDULE_ID: UUID identifying the schedule.`

	// This command is being previewed; hide from help output.
	cmd.Hidden = true

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(2)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		if cmd.Flags().Changed("json") {
			err = updateScheduleJson.Unmarshal(&updateScheduleReq)
			if err != nil {
				return err
			}
		} else {
			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
		}
		updateScheduleReq.DashboardId = args[0]
		updateScheduleReq.ScheduleId = args[1]

		response, err := w.Lakeview.UpdateSchedule(ctx, updateScheduleReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range updateScheduleOverrides {
		fn(cmd, &updateScheduleReq)
	}

	return cmd
}

// end service Lakeview

@@ -23,7 +23,12 @@ func New() *cobra.Command {
		Long: `These endpoints are used for CRUD operations on query definitions. Query
  definitions include the target SQL warehouse, query text, name, description,
  tags, parameters, and visualizations. Queries can be scheduled using the
  sql_task type of the Jobs API, e.g. :method:jobs/create.`,
  sql_task type of the Jobs API, e.g. :method:jobs/create.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`,
		GroupID: "sql",
		Annotations: map[string]string{
			"package": "sql",

@@ -76,7 +81,12 @@ func newCreate() *cobra.Command {
  available SQL warehouses. Or you can copy the data_source_id from an
  existing query.

  **Note**: You cannot add a visualization until you create the query.`
  **Note**: You cannot add a visualization until you create the query.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -135,7 +145,12 @@ func newDelete() *cobra.Command {

  Moves a query to the trash. Trashed queries immediately disappear from
  searches and list views, and they cannot be used for alerts. The trash is
  deleted after 30 days.`
  deleted after 30 days.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -203,7 +218,12 @@ func newGet() *cobra.Command {
	cmd.Long = `Get a query definition.

  Retrieve a query object definition along with contextual permissions
  information about the currently authenticated user.`
  information about the currently authenticated user.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -278,8 +298,13 @@ func newList() *cobra.Command {
  Gets a list of queries. Optionally, this list can be filtered by a search
  term.

  ### **Warning: Calling this API concurrently 10 or more times could result in
  throttling, service degradation, or a temporary ban.**`
  **Warning**: Calling this API concurrently 10 or more times could result in
  throttling, service degradation, or a temporary ban.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -330,7 +355,12 @@ func newRestore() *cobra.Command {
	cmd.Long = `Restore a query.

  Restore a query that has been moved to the trash. A restored query appears in
  list views and searches. You can use restored queries for alerts.`
  list views and searches. You can use restored queries for alerts.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -409,7 +439,12 @@ func newUpdate() *cobra.Command {

  Modify this query definition.

  **Note**: You cannot undo this operation.`
  **Note**: You cannot undo this operation.

  **Note**: A new version of the Databricks SQL API will soon be available.
  [Learn more]

  [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources`

	cmd.Annotations = make(map[string]string)

@@ -366,6 +366,7 @@ func newUpdate() *cobra.Command {
	cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`)
	// TODO: complex arg: databricks_gcp_service_account
	cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`)
	cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`)
	cmd.Flags().StringVar(&updateReq.NewName, "new-name", updateReq.NewName, `New name for the storage credential.`)
	cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`)
	cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`)

@@ -42,6 +42,7 @@ func New() *cobra.Command {
	cmd.AddCommand(newGetIndex())
	cmd.AddCommand(newListIndexes())
	cmd.AddCommand(newQueryIndex())
	cmd.AddCommand(newQueryNextPage())
	cmd.AddCommand(newScanIndex())
	cmd.AddCommand(newSyncIndex())
	cmd.AddCommand(newUpsertDataVectorIndex())

@@ -416,6 +417,7 @@ func newQueryIndex() *cobra.Command {
	cmd.Flags().StringVar(&queryIndexReq.FiltersJson, "filters-json", queryIndexReq.FiltersJson, `JSON string representing query filters.`)
	cmd.Flags().IntVar(&queryIndexReq.NumResults, "num-results", queryIndexReq.NumResults, `Number of results to return.`)
	cmd.Flags().StringVar(&queryIndexReq.QueryText, "query-text", queryIndexReq.QueryText, `Query text.`)
	cmd.Flags().StringVar(&queryIndexReq.QueryType, "query-type", queryIndexReq.QueryType, `The query type to use.`)
	// TODO: array: query_vector
	cmd.Flags().Float64Var(&queryIndexReq.ScoreThreshold, "score-threshold", queryIndexReq.ScoreThreshold, `Threshold for the approximate nearest neighbor search.`)

@@ -469,6 +471,76 @@ func newQueryIndex() *cobra.Command {
	return cmd
}

// start query-next-page command

// Slice with functions to override default command behavior.
// Functions can be added from the `init()` function in manually curated files in this directory.
var queryNextPageOverrides []func(
	*cobra.Command,
	*vectorsearch.QueryVectorIndexNextPageRequest,
)

func newQueryNextPage() *cobra.Command {
	cmd := &cobra.Command{}

	var queryNextPageReq vectorsearch.QueryVectorIndexNextPageRequest
	var queryNextPageJson flags.JsonFlag

	// TODO: short flags
	cmd.Flags().Var(&queryNextPageJson, "json", `either inline JSON string or @path/to/file.json with request body`)

	cmd.Flags().StringVar(&queryNextPageReq.EndpointName, "endpoint-name", queryNextPageReq.EndpointName, `Name of the endpoint.`)
	cmd.Flags().StringVar(&queryNextPageReq.PageToken, "page-token", queryNextPageReq.PageToken, `Page token returned from previous QueryVectorIndex or QueryVectorIndexNextPage API.`)

	cmd.Use = "query-next-page INDEX_NAME"
	cmd.Short = `Query next page.`
	cmd.Long = `Query next page.

  Use next_page_token returned from previous QueryVectorIndex or
  QueryVectorIndexNextPage request to fetch next page of results.

  Arguments:
    INDEX_NAME: Name of the vector index to query.`

	cmd.Annotations = make(map[string]string)

	cmd.Args = func(cmd *cobra.Command, args []string) error {
		check := root.ExactArgs(1)
		return check(cmd, args)
	}

	cmd.PreRunE = root.MustWorkspaceClient
	cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
		ctx := cmd.Context()
		w := root.WorkspaceClient(ctx)

		if cmd.Flags().Changed("json") {
			err = queryNextPageJson.Unmarshal(&queryNextPageReq)
			if err != nil {
				return err
			}
		}
		queryNextPageReq.IndexName = args[0]

		response, err := w.VectorSearchIndexes.QueryNextPage(ctx, queryNextPageReq)
		if err != nil {
			return err
		}
		return cmdio.Render(ctx, response)
	}

	// Disable completions since they are not applicable.
	// Can be overridden by manual implementation in `override.go`.
	cmd.ValidArgsFunction = cobra.NoFileCompletions

	// Apply optional overrides to this command.
	for _, fn := range queryNextPageOverrides {
		fn(cmd, &queryNextPageReq)
	}

	return cmd
}
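This command pairs with `query-index`: the first response carries a `next_page_token`, which is fed back here as `--page-token`. A sketch of the same loop against the SDK; the `NextPageToken` response field name is inferred from the `next_page_token` wording in the help text, and the sketch assumes `QueryNextPage` returns the same response shape as `QueryIndex` (both assumptions):

// Hypothetical pagination sketch (w and ctx as in the RunE body above).
res, err := w.VectorSearchIndexes.QueryIndex(ctx, vectorsearch.QueryVectorIndexRequest{
	IndexName:  "main.default.my_index", // placeholder index name
	QueryText:  "example query",
	NumResults: 10,
})
if err != nil {
	return err
}
for res.NextPageToken != "" {
	// Fetch the next page using the token from the previous response.
	res, err = w.VectorSearchIndexes.QueryNextPage(ctx, vectorsearch.QueryVectorIndexNextPageRequest{
		IndexName: "main.default.my_index",
		PageToken: res.NextPageToken,
	})
	if err != nil {
		return err
	}
}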

// start scan-index command

// Slice with functions to override default command behavior.

go.mod

@@ -5,7 +5,7 @@ go 1.21
require (
	github.com/Masterminds/semver/v3 v3.2.1 // MIT
	github.com/briandowns/spinner v1.23.1 // Apache 2.0
	github.com/databricks/databricks-sdk-go v0.42.0 // Apache 2.0
	github.com/databricks/databricks-sdk-go v0.43.0 // Apache 2.0
	github.com/fatih/color v1.17.0 // MIT
	github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
	github.com/google/uuid v1.6.0 // BSD-3-Clause

go.sum

@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
github.com/databricks/databricks-sdk-go v0.42.0 h1:WKdoqnvb+jvsR9+IYkC3P4BH5eJHRzVOr59y3mCoY+s=
github.com/databricks/databricks-sdk-go v0.42.0/go.mod h1:a9rr0FOHLL26kOjQjZZVFjIYmRABCbrAWVeundDEVG8=
github.com/databricks/databricks-sdk-go v0.43.0 h1:x4laolWhYlsQg2t8yWEGyRPZy4/Wv3pKnLEoJfVin7I=
github.com/databricks/databricks-sdk-go v0.43.0/go.mod h1:a9rr0FOHLL26kOjQjZZVFjIYmRABCbrAWVeundDEVG8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=

@@ -42,7 +42,7 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value,
	// Dereference pointer if necessary
	for srcv.Kind() == reflect.Pointer {
		if srcv.IsNil() {
			return dyn.NilValue, nil
			return dyn.NilValue.WithLocation(ref.Location()), nil
		}
		srcv = srcv.Elem()
	}

@@ -55,32 +55,45 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value,
		}
	}

	var v dyn.Value
	var err error
	switch srcv.Kind() {
	case reflect.Struct:
		return fromTypedStruct(srcv, ref, options...)
		v, err = fromTypedStruct(srcv, ref, options...)
	case reflect.Map:
		return fromTypedMap(srcv, ref)
		v, err = fromTypedMap(srcv, ref)
	case reflect.Slice:
		return fromTypedSlice(srcv, ref)
		v, err = fromTypedSlice(srcv, ref)
	case reflect.String:
		return fromTypedString(srcv, ref, options...)
		v, err = fromTypedString(srcv, ref, options...)
	case reflect.Bool:
		return fromTypedBool(srcv, ref, options...)
		v, err = fromTypedBool(srcv, ref, options...)
	case reflect.Int, reflect.Int32, reflect.Int64:
		return fromTypedInt(srcv, ref, options...)
		v, err = fromTypedInt(srcv, ref, options...)
	case reflect.Float32, reflect.Float64:
		return fromTypedFloat(srcv, ref, options...)
		v, err = fromTypedFloat(srcv, ref, options...)
	case reflect.Invalid:
		// If the value is untyped and not set (e.g. any type with nil value), we return nil.
		return dyn.NilValue, nil
		v, err = dyn.NilValue, nil
	default:
		return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind())
	}

	return dyn.InvalidValue, fmt.Errorf("unsupported type: %s", srcv.Kind())
	// Ensure the location metadata is retained.
	if err != nil {
		return dyn.InvalidValue, err
	}
	return v.WithLocation(ref.Location()), err
}
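The net effect of collecting `v, err` from every branch and applying `WithLocation` once is that conversion can no longer drop the source position of the reference it replaces, which the reworked tests below pin down. A tiny illustration, mirroring the test setup (the `convert` package path is an assumption based on the in-package test calls to `FromTyped`):

ref := dyn.NewValue("old", dyn.Location{File: "databricks.yml"})

// The typed value changed, but the converted value still reports the
// location of the reference it replaces.
nv, _ := convert.FromTyped("new", ref)
_ = nv.Location() // dyn.Location{File: "databricks.yml"}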
|
||||
|
||||
func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) {
|
||||
// Check that the reference value is compatible or nil.
|
||||
switch ref.Kind() {
|
||||
case dyn.KindString:
|
||||
// Ignore pure variable references (e.g. ${var.foo}).
|
||||
if dynvar.IsPureVariableReference(ref.MustString()) {
|
||||
return ref, nil
|
||||
}
|
||||
case dyn.KindMap, dyn.KindNil:
|
||||
default:
|
||||
return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind())
|
||||
|
@ -100,14 +113,19 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio
|
|||
refv = dyn.NilValue
|
||||
}
|
||||
|
||||
var options []fromTypedOptions
|
||||
if v.Kind() == reflect.Interface {
|
||||
options = append(options, includeZeroValues)
|
||||
}
|
||||
|
||||
// Convert the field taking into account the reference value (may be equal to config.NilValue).
|
||||
nv, err := fromTyped(v.Interface(), refv)
|
||||
nv, err := fromTyped(v.Interface(), refv, options...)
|
||||
if err != nil {
|
||||
return dyn.InvalidValue, err
|
||||
}
|
||||
|
||||
// Either if the key was set in the reference or the field is not zero-valued, we include it.
|
||||
if ok || nv != dyn.NilValue {
|
||||
if ok || nv.Kind() != dyn.KindNil {
|
||||
out.Set(refk, nv)
|
||||
}
|
||||
}
|
||||
|
@ -117,7 +135,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio
|
|||
// 2. The reference is a map (i.e. the struct was and still is empty).
|
||||
// 3. The "includeZeroValues" option is set (i.e. the struct is a non-nil pointer).
|
||||
if out.Len() > 0 || ref.Kind() == dyn.KindMap || slices.Contains(options, includeZeroValues) {
|
||||
return dyn.NewValue(out, ref.Location()), nil
|
||||
return dyn.V(out), nil
|
||||
}
|
||||
|
||||
// Otherwise, return nil.
|
||||
|
@ -127,6 +145,11 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio
|
|||
func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
|
||||
// Check that the reference value is compatible or nil.
|
||||
switch ref.Kind() {
|
||||
case dyn.KindString:
|
||||
// Ignore pure variable references (e.g. ${var.foo}).
|
||||
if dynvar.IsPureVariableReference(ref.MustString()) {
|
||||
return ref, nil
|
||||
}
|
||||
case dyn.KindMap, dyn.KindNil:
|
||||
default:
|
||||
return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind())
|
||||
|
@ -164,12 +187,17 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
|
|||
out.Set(refk, nv)
|
||||
}
|
||||
|
||||
return dyn.NewValue(out, ref.Location()), nil
|
||||
return dyn.V(out), nil
|
||||
}
|
||||
|
||||
func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
|
||||
// Check that the reference value is compatible or nil.
|
||||
switch ref.Kind() {
|
||||
case dyn.KindString:
|
||||
// Ignore pure variable references (e.g. ${var.foo}).
|
||||
if dynvar.IsPureVariableReference(ref.MustString()) {
|
||||
return ref, nil
|
||||
}
|
||||
case dyn.KindSequence, dyn.KindNil:
|
||||
default:
|
||||
return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind())
|
||||
|
@ -186,7 +214,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
|
|||
refv := ref.Index(i)
|
||||
|
||||
// Use nil reference if there is no reference for this index.
|
||||
if refv == dyn.InvalidValue {
|
||||
if refv.Kind() == dyn.KindInvalid {
|
||||
refv = dyn.NilValue
|
||||
}
|
||||
|
||||
|
@ -199,7 +227,7 @@ func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) {
|
|||
out[i] = nv
|
||||
}
|
||||
|
||||
return dyn.NewValue(out, ref.Location()), nil
|
||||
return dyn.V(out), nil
|
||||
}
|
||||
|
||||
func fromTypedString(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) {
|
||||
|
|
|
@ -49,7 +49,7 @@ func TestFromTypedStructPointerZeroFields(t *testing.T) {
|
|||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NilValue, nv)
|
||||
|
||||
// For an initialized pointer with a nil reference we expect a nil.
|
||||
// For an initialized pointer with a nil reference we expect an empty map.
|
||||
src = &Tmp{}
|
||||
nv, err = FromTyped(src, dyn.NilValue)
|
||||
require.NoError(t, err)
|
||||
|
@ -103,7 +103,7 @@ func TestFromTypedStructSetFields(t *testing.T) {
|
|||
}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) {
|
||||
func TestFromTypedStructSetFieldsRetainLocation(t *testing.T) {
|
||||
type Tmp struct {
|
||||
Foo string `json:"foo"`
|
||||
Bar string `json:"bar"`
|
||||
|
@ -122,11 +122,9 @@ func TestFromTypedStructSetFieldsRetainLocationIfUnchanged(t *testing.T) {
|
|||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Assert foo has retained its location.
|
||||
// Assert foo and bar have retained their location.
|
||||
assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo"))
|
||||
|
||||
// Assert bar lost its location (because it was overwritten).
|
||||
assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar"))
|
||||
assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar"))
|
||||
}
|
||||
|
||||
func TestFromTypedStringMapWithZeroValue(t *testing.T) {
|
||||
|
@ -354,7 +352,7 @@ func TestFromTypedMapNonEmpty(t *testing.T) {
|
|||
}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) {
|
||||
func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) {
|
||||
var src = map[string]string{
|
||||
"foo": "bar",
|
||||
"bar": "qux",
|
||||
|
@ -368,11 +366,9 @@ func TestFromTypedMapNonEmptyRetainLocationIfUnchanged(t *testing.T) {
|
|||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Assert foo has retained its location.
|
||||
// Assert foo and bar have retained their locations.
|
||||
assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv.Get("foo"))
|
||||
|
||||
// Assert bar lost its location (because it was overwritten).
|
||||
assert.Equal(t, dyn.NewValue("qux", dyn.Location{}), nv.Get("bar"))
|
||||
assert.Equal(t, dyn.NewValue("qux", dyn.Location{File: "bar"}), nv.Get("bar"))
|
||||
}
|
||||
|
||||
func TestFromTypedMapFieldWithZeroValue(t *testing.T) {
|
||||
|
@ -429,7 +425,7 @@ func TestFromTypedSliceNonEmpty(t *testing.T) {
|
|||
}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) {
|
||||
func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) {
|
||||
var src = []string{
|
||||
"foo",
|
||||
"bar",
|
||||
|
@ -437,17 +433,15 @@ func TestFromTypedSliceNonEmptyRetainLocationIfUnchanged(t *testing.T) {
|
|||
|
||||
ref := dyn.V([]dyn.Value{
|
||||
dyn.NewValue("foo", dyn.Location{File: "foo"}),
|
||||
dyn.NewValue("baz", dyn.Location{File: "baz"}),
|
||||
dyn.NewValue("bar", dyn.Location{File: "bar"}),
|
||||
})
|
||||
|
||||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Assert foo has retained its location.
|
||||
// Assert foo and bar have retained their locations.
|
||||
assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv.Index(0))
|
||||
|
||||
// Assert bar lost its location (because it was overwritten).
|
||||
assert.Equal(t, dyn.NewValue("bar", dyn.Location{}), nv.Index(1))
|
||||
assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "bar"}), nv.Index(1))
|
||||
}
|
||||
|
||||
func TestFromTypedStringEmpty(t *testing.T) {
|
||||
|
@ -482,12 +476,20 @@ func TestFromTypedStringNonEmptyOverwrite(t *testing.T) {
|
|||
assert.Equal(t, dyn.V("new"), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedStringRetainsLocationsIfUnchanged(t *testing.T) {
|
||||
var src string = "foo"
|
||||
func TestFromTypedStringRetainsLocations(t *testing.T) {
|
||||
var ref = dyn.NewValue("foo", dyn.Location{File: "foo"})
|
||||
|
||||
// case: value has not been changed
|
||||
var src string = "foo"
|
||||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue("foo", dyn.Location{File: "foo"}), nv)
|
||||
|
||||
// case: value has been changed
|
||||
src = "bar"
|
||||
nv, err = FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue("bar", dyn.Location{File: "foo"}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedStringTypeError(t *testing.T) {
|
||||
|
@ -529,12 +531,20 @@ func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) {
|
|||
assert.Equal(t, dyn.V(true), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedBoolRetainsLocationsIfUnchanged(t *testing.T) {
|
||||
var src bool = true
|
||||
func TestFromTypedBoolRetainsLocations(t *testing.T) {
|
||||
var ref = dyn.NewValue(true, dyn.Location{File: "foo"})
|
||||
|
||||
// case: value has not been changed
|
||||
var src bool = true
|
||||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue(true, dyn.Location{File: "foo"}), nv)
|
||||
|
||||
// case: value has been changed
|
||||
src = false
|
||||
nv, err = FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "foo"}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedBoolVariableReference(t *testing.T) {
|
||||
|
@ -584,12 +594,20 @@ func TestFromTypedIntNonEmptyOverwrite(t *testing.T) {
|
|||
assert.Equal(t, dyn.V(int64(1234)), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedIntRetainsLocationsIfUnchanged(t *testing.T) {
|
||||
var src int = 1234
|
||||
func TestFromTypedIntRetainsLocations(t *testing.T) {
|
||||
var ref = dyn.NewValue(1234, dyn.Location{File: "foo"})
|
||||
|
||||
// case: value has not been changed
|
||||
var src int = 1234
|
||||
nv, err := FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue(1234, dyn.Location{File: "foo"}), nv)
|
||||
|
||||
// case: value has been changed
|
||||
src = 1235
|
||||
nv, err = FromTyped(src, ref)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, dyn.NewValue(int64(1235), dyn.Location{File: "foo"}), nv)
|
||||
}
|
||||
|
||||
func TestFromTypedIntVariableReference(t *testing.T) {
|
||||
|
@@ -639,12 +657,21 @@ func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) {
 	assert.Equal(t, dyn.V(1.23), nv)
 }
 
-func TestFromTypedFloatRetainsLocationsIfUnchanged(t *testing.T) {
-	var src float64 = 1.23
+func TestFromTypedFloatRetainsLocations(t *testing.T) {
+	var src float64
 	var ref = dyn.NewValue(1.23, dyn.Location{File: "foo"})
+
+	// case: value has not been changed
+	src = 1.23
 	nv, err := FromTyped(src, ref)
 	require.NoError(t, err)
 	assert.Equal(t, dyn.NewValue(1.23, dyn.Location{File: "foo"}), nv)
+
+	// case: value has been changed
+	src = 1.24
+	nv, err = FromTyped(src, ref)
+	require.NoError(t, err)
+	assert.Equal(t, dyn.NewValue(1.24, dyn.Location{File: "foo"}), nv)
 }
 
 func TestFromTypedFloatVariableReference(t *testing.T) {
@@ -662,6 +689,42 @@ func TestFromTypedFloatTypeError(t *testing.T) {
 	require.Error(t, err)
 }
 
+func TestFromTypedAny(t *testing.T) {
+	type Tmp struct {
+		Foo any `json:"foo"`
+		Bar any `json:"bar"`
+		Foz any `json:"foz"`
+		Baz any `json:"baz"`
+	}
+
+	src := Tmp{
+		Foo: "foo",
+		Bar: false,
+		Foz: 0,
+		Baz: map[string]any{
+			"foo": "foo",
+			"bar": 1234,
+			"qux": 0,
+			"nil": nil,
+		},
+	}
+
+	ref := dyn.NilValue
+	nv, err := FromTyped(src, ref)
+	require.NoError(t, err)
+	assert.Equal(t, dyn.V(map[string]dyn.Value{
+		"foo": dyn.V("foo"),
+		"bar": dyn.V(false),
+		"foz": dyn.V(int64(0)),
+		"baz": dyn.V(map[string]dyn.Value{
+			"foo": dyn.V("foo"),
+			"bar": dyn.V(int64(1234)),
+			"qux": dyn.V(int64(0)),
+			"nil": dyn.V(nil),
+		}),
+	}), nv)
+}
+
 func TestFromTypedAnyNil(t *testing.T) {
 	var src any = nil
 	var ref = dyn.NilValue
@@ -669,3 +732,35 @@ func TestFromTypedAnyNil(t *testing.T) {
 	require.NoError(t, err)
 	assert.Equal(t, dyn.NilValue, nv)
 }
+
+func TestFromTypedNilPointerRetainsLocations(t *testing.T) {
+	type Tmp struct {
+		Foo string `json:"foo"`
+		Bar string `json:"bar"`
+	}
+
+	var src *Tmp
+	ref := dyn.NewValue(nil, dyn.Location{File: "foobar"})
+
+	nv, err := FromTyped(src, ref)
+	require.NoError(t, err)
+	assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv)
+}
+
+func TestFromTypedNilMapRetainsLocation(t *testing.T) {
+	var src map[string]string
+	ref := dyn.NewValue(nil, dyn.Location{File: "foobar"})
+
+	nv, err := FromTyped(src, ref)
+	require.NoError(t, err)
+	assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv)
+}
+
+func TestFromTypedNilSliceRetainsLocation(t *testing.T) {
+	var src []string
+	ref := dyn.NewValue(nil, dyn.Location{File: "foobar"})
+
+	nv, err := FromTyped(src, ref)
+	require.NoError(t, err)
+	assert.Equal(t, dyn.NewValue(nil, dyn.Location{File: "foobar"}), nv)
+}
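The tests above pin down a subtle contract of FromTyped: the dyn.Location attached to the reference value survives conversion, both when the typed value is unchanged and when it differs. A minimal sketch of that calling pattern, assuming the libs/dyn/convert import path these files live under (the file name bundle.yml is illustrative):

    package main

    import (
        "fmt"

        "github.com/databricks/cli/libs/dyn"
        "github.com/databricks/cli/libs/dyn/convert"
    )

    func main() {
        // Reference value, as previously loaded from a config file.
        ref := dyn.NewValue("foo", dyn.Location{File: "bundle.yml", Line: 3, Column: 7})

        // Converting a typed value back keeps the reference's location,
        // whether or not the value itself changed.
        nv, err := convert.FromTyped("bar", ref)
        if err != nil {
            panic(err)
        }
        fmt.Printf("%v at %v\n", nv.AsAny(), nv.Location())
    }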
@@ -56,6 +56,8 @@ func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value, seen []
 		return n.normalizeInt(typ, src, path)
 	case reflect.Float32, reflect.Float64:
 		return n.normalizeFloat(typ, src, path)
+	case reflect.Interface:
+		return n.normalizeInterface(typ, src, path)
 	}
 
 	return dyn.InvalidValue, diag.Errorf("unsupported type: %s", typ.Kind())
@@ -166,8 +168,15 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen
 		return dyn.NewValue(out, src.Location()), diags
 	case dyn.KindNil:
 		return src, diags
+
+	case dyn.KindString:
+		// Return verbatim if it's a pure variable reference.
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			return src, nil
+		}
 	}
 
+	// Cannot interpret as a struct.
 	return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path))
 }
 
@@ -197,8 +206,15 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r
 		return dyn.NewValue(out, src.Location()), diags
 	case dyn.KindNil:
 		return src, diags
+
+	case dyn.KindString:
+		// Return verbatim if it's a pure variable reference.
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			return src, nil
+		}
 	}
 
+	// Cannot interpret as a map.
 	return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path))
 }
 
@@ -225,8 +241,15 @@ func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value, seen [
 		return dyn.NewValue(out, src.Location()), diags
 	case dyn.KindNil:
 		return src, diags
+
+	case dyn.KindString:
+		// Return verbatim if it's a pure variable reference.
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			return src, nil
+		}
 	}
 
+	// Cannot interpret as a slice.
 	return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindSequence, src, path))
 }
 
@@ -371,3 +394,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d
 
 	return dyn.NewValue(out, src.Location()), diags
 }
+
+func (n normalizeOptions) normalizeInterface(typ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) {
+	return src, nil
+}
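The new normalizeInterface case is a deliberate passthrough: when the destination is an interface (an `any` field), there is no schema to coerce to, so the dynamic value is returned untouched. A rough sketch of the observable behavior through the exported Normalize entry point, with the import path assumed:

    package main

    import (
        "fmt"

        "github.com/databricks/cli/libs/dyn"
        "github.com/databricks/cli/libs/dyn/convert"
    )

    func main() {
        var out any
        vin := dyn.V(map[string]dyn.Value{"foo": dyn.V("bar")})

        // For an `any` destination there is nothing to coerce; the input
        // comes back unchanged and without diagnostics.
        vout, diags := convert.Normalize(&out, vin)
        fmt.Println(vout.AsAny(), len(diags))
    }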
@@ -223,6 +223,52 @@ func TestNormalizeStructIncludeMissingFieldsOnRecursiveType(t *testing.T) {
 	}), vout)
 }
 
+func TestNormalizeStructVariableReference(t *testing.T) {
+	type Tmp struct {
+		Foo string `json:"foo"`
+	}
+
+	var typ Tmp
+	vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, vin, vout)
+}
+
+func TestNormalizeStructRandomStringError(t *testing.T) {
+	type Tmp struct {
+		Foo string `json:"foo"`
+	}
+
+	var typ Tmp
+	vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected map, found string`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
+func TestNormalizeStructIntError(t *testing.T) {
+	type Tmp struct {
+		Foo string `json:"foo"`
+	}
+
+	var typ Tmp
+	vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected map, found int`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
 func TestNormalizeMap(t *testing.T) {
 	var typ map[string]string
 	vin := dyn.V(map[string]dyn.Value{
@@ -312,6 +358,40 @@ func TestNormalizeMapNestedError(t *testing.T) {
 	)
 }
 
+func TestNormalizeMapVariableReference(t *testing.T) {
+	var typ map[string]string
+	vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, vin, vout)
+}
+
+func TestNormalizeMapRandomStringError(t *testing.T) {
+	var typ map[string]string
+	vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected map, found string`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
+func TestNormalizeMapIntError(t *testing.T) {
+	var typ map[string]string
+	vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected map, found int`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
 func TestNormalizeSlice(t *testing.T) {
 	var typ []string
 	vin := dyn.V([]dyn.Value{
@@ -400,6 +480,40 @@ func TestNormalizeSliceNestedError(t *testing.T) {
 	)
 }
 
+func TestNormalizeSliceVariableReference(t *testing.T) {
+	var typ []string
+	vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(typ, vin)
+	assert.Empty(t, err)
+	assert.Equal(t, vin, vout)
+}
+
+func TestNormalizeSliceRandomStringError(t *testing.T) {
+	var typ []string
+	vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected sequence, found string`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
+func TestNormalizeSliceIntError(t *testing.T) {
+	var typ []string
+	vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1})
+	_, err := Normalize(typ, vin)
+	assert.Len(t, err, 1)
+	assert.Equal(t, diag.Diagnostic{
+		Severity: diag.Warning,
+		Summary:  `expected sequence, found int`,
+		Location: vin.Location(),
+		Path:     dyn.EmptyPath,
+	}, err[0])
+}
+
 func TestNormalizeString(t *testing.T) {
 	var typ string
 	vin := dyn.V("string")
@@ -725,3 +839,29 @@ func TestNormalizeAnchors(t *testing.T) {
 		"foo": "bar",
 	}, vout.AsAny())
 }
+
+func TestNormalizeBoolToAny(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1}), vout)
+}
+
+func TestNormalizeIntToAny(t *testing.T) {
+	var typ any
+	vin := dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1}), vout)
+}
+
+func TestNormalizeSliceToAny(t *testing.T) {
+	var typ any
+	v1 := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1})
+	v2 := dyn.NewValue(2, dyn.Location{File: "file", Line: 1, Column: 1})
+	vin := dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1})
+	vout, err := Normalize(&typ, vin)
+	assert.Len(t, err, 0)
+	assert.Equal(t, dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1}), vout)
+}
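Worth noting in the expectations above: normalization reports type mismatches as diag.Warning entries carrying the offending value's location and path, rather than failing outright. A small sketch of consuming such diagnostics (import paths assumed from this repository's layout):

    package main

    import (
        "fmt"

        "github.com/databricks/cli/libs/dyn"
        "github.com/databricks/cli/libs/dyn/convert"
    )

    func main() {
        var typ map[string]string
        vin := dyn.NewValue(1, dyn.Location{File: "bundle.yml", Line: 3, Column: 5})

        // An int where a map is expected yields a warning diagnostic,
        // not an error; callers can report it and continue.
        _, diags := convert.Normalize(typ, vin)
        for _, d := range diags {
            fmt.Println(d.Severity, d.Summary, d.Location)
        }
    }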
@@ -16,7 +16,7 @@ func ToTyped(dst any, src dyn.Value) error {
 	for dstv.Kind() == reflect.Pointer {
 		// If the source value is nil and the destination is a settable pointer,
 		// set the destination to nil. Also see `end_to_end_test.go`.
-		if dstv.CanSet() && src == dyn.NilValue {
+		if dstv.CanSet() && src.Kind() == dyn.KindNil {
 			dstv.SetZero()
 			return nil
 		}
@@ -46,6 +46,8 @@ func ToTyped(dst any, src dyn.Value) error {
 		return toTypedInt(dstv, src)
 	case reflect.Float32, reflect.Float64:
 		return toTypedFloat(dstv, src)
+	case reflect.Interface:
+		return toTypedInterface(dstv, src)
 	}
 
 	return fmt.Errorf("unsupported type: %s", dstv.Kind())
@@ -101,6 +103,12 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error {
 	case dyn.KindNil:
 		dst.SetZero()
 		return nil
+	case dyn.KindString:
+		// Ignore pure variable references (e.g. ${var.foo}).
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			dst.SetZero()
+			return nil
+		}
 	}
 
 	return TypeError{
@@ -132,6 +140,12 @@ func toTypedMap(dst reflect.Value, src dyn.Value) error {
 	case dyn.KindNil:
 		dst.SetZero()
 		return nil
+	case dyn.KindString:
+		// Ignore pure variable references (e.g. ${var.foo}).
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			dst.SetZero()
+			return nil
+		}
 	}
 
 	return TypeError{
@@ -157,6 +171,12 @@ func toTypedSlice(dst reflect.Value, src dyn.Value) error {
 	case dyn.KindNil:
 		dst.SetZero()
 		return nil
+	case dyn.KindString:
+		// Ignore pure variable references (e.g. ${var.foo}).
+		if dynvar.IsPureVariableReference(src.MustString()) {
+			dst.SetZero()
+			return nil
+		}
 	}
 
 	return TypeError{
@@ -260,3 +280,8 @@ func toTypedFloat(dst reflect.Value, src dyn.Value) error {
 		msg: fmt.Sprintf("expected a float, found a %s", src.Kind()),
 	}
 }
+
+func toTypedInterface(dst reflect.Value, src dyn.Value) error {
+	dst.Set(reflect.ValueOf(src.AsAny()))
+	return nil
+}
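Taken together, the KindString cases added to toTypedStruct, toTypedMap, and toTypedSlice mean an unresolved pure variable reference such as ${var.foo} standing in for a composite value no longer raises a TypeError; the destination is zeroed instead. toTypedInterface rounds this out for `any` destinations by storing src.AsAny(). A sketch of the resulting behavior (import path assumed):

    package main

    import (
        "fmt"

        "github.com/databricks/cli/libs/dyn"
        "github.com/databricks/cli/libs/dyn/convert"
    )

    func main() {
        // A still-unresolved reference in place of a map: the destination
        // is set to its zero value instead of returning a TypeError.
        var m map[string]string
        err := convert.ToTyped(&m, dyn.V("${var.foo}"))
        fmt.Println(m == nil, err) // true <nil>

        // An `any` destination receives the plain Go representation.
        var out any
        err = convert.ToTyped(&out, dyn.V(true))
        fmt.Println(out, err) // true <nil>
    }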
@@ -511,3 +511,25 @@ func TestToTypedWithAliasKeyType(t *testing.T) {
 	assert.Equal(t, "bar", out["foo"])
 	assert.Equal(t, "baz", out["bar"])
 }
+
+func TestToTypedAnyWithBool(t *testing.T) {
+	var out any
+	err := ToTyped(&out, dyn.V(false))
+	require.NoError(t, err)
+	assert.Equal(t, false, out)
+
+	err = ToTyped(&out, dyn.V(true))
+	require.NoError(t, err)
+	assert.Equal(t, true, out)
+}
+
+func TestToTypedAnyWithMap(t *testing.T) {
+	var out any
+	v := dyn.V(map[string]dyn.Value{
+		"foo": dyn.V("bar"),
+		"bar": dyn.V("baz"),
+	})
+	err := ToTyped(&out, v)
+	require.NoError(t, err)
+	assert.Equal(t, map[string]any{"foo": "bar", "bar": "baz"}, out)
+}
@@ -6,7 +6,7 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 )
 
-const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`
+const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`
 
 var re = regexp.MustCompile(VariableRegex)
 
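The VariableRegex change extends the reference grammar with optional numeric index suffixes, (\[[0-9]+\])*, both after each dotted component and at the end of the path, so indexed references now match while plain dotted ones continue to. A quick check, assuming VariableRegex remains exported from libs/dyn/dynvar:

    package main

    import (
        "fmt"
        "regexp"

        "github.com/databricks/cli/libs/dyn/dynvar"
    )

    func main() {
        re := regexp.MustCompile(dynvar.VariableRegex)
        for _, s := range []string{
            "${var.foo}",            // still matches
            "${slice[0]}",           // trailing index
            "${map.slice[0].value}", // index on an inner component
            "${slice[0][0]}",        // nested indexes
        } {
            fmt.Println(s, re.MatchString(s)) // all print true
        }
    }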
@@ -247,3 +247,63 @@ func TestResolveWithInterpolateAliasedRef(t *testing.T) {
 	assert.Equal(t, "a", getByPath(t, out, "b").MustString())
 	assert.Equal(t, "a", getByPath(t, out, "c").MustString())
 }
+
+func TestResolveIndexedRefs(t *testing.T) {
+	in := dyn.V(map[string]dyn.Value{
+		"slice": dyn.V([]dyn.Value{dyn.V("a"), dyn.V("b")}),
+		"a":     dyn.V("a: ${slice[0]}"),
+	})
+
+	out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in))
+	require.NoError(t, err)
+
+	assert.Equal(t, "a: a", getByPath(t, out, "a").MustString())
+}
+
+func TestResolveIndexedRefsFromMap(t *testing.T) {
+	in := dyn.V(map[string]dyn.Value{
+		"map": dyn.V(
+			map[string]dyn.Value{
+				"slice": dyn.V([]dyn.Value{dyn.V("a")}),
+			}),
+		"a": dyn.V("a: ${map.slice[0]}"),
+	})
+
+	out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in))
+	require.NoError(t, err)
+
+	assert.Equal(t, "a: a", getByPath(t, out, "a").MustString())
+}
+
+func TestResolveMapFieldFromIndexedRefs(t *testing.T) {
+	in := dyn.V(map[string]dyn.Value{
+		"map": dyn.V(
+			map[string]dyn.Value{
+				"slice": dyn.V([]dyn.Value{
+					dyn.V(map[string]dyn.Value{
+						"value": dyn.V("a"),
+					}),
+				}),
+			}),
+		"a": dyn.V("a: ${map.slice[0].value}"),
+	})
+
+	out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in))
+	require.NoError(t, err)
+
+	assert.Equal(t, "a: a", getByPath(t, out, "a").MustString())
+}
+
+func TestResolveNestedIndexedRefs(t *testing.T) {
+	in := dyn.V(map[string]dyn.Value{
+		"slice": dyn.V([]dyn.Value{
+			dyn.V([]dyn.Value{dyn.V("a")}),
+		}),
+		"a": dyn.V("a: ${slice[0][0]}"),
+	})
+
+	out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in))
+	require.NoError(t, err)
+
+	assert.Equal(t, "a: a", getByPath(t, out, "a").MustString())
+}
|
|||
}
|
||||
|
||||
func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor OverrideVisitor) (dyn.Value, error) {
|
||||
if left == dyn.NilValue && right == dyn.NilValue {
|
||||
return dyn.NilValue, nil
|
||||
}
|
||||
|
||||
if left.Kind() != right.Kind() {
|
||||
return visitor.VisitUpdate(basePath, left, right)
|
||||
}
|
||||
|
@@ -98,9 +94,11 @@ func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor Overri
 		} else {
 			return visitor.VisitUpdate(basePath, left, right)
 		}
+	case dyn.KindNil:
+		return left, nil
 	}
 
-	return dyn.InvalidValue, fmt.Errorf("unexpected kind %s", left.Kind())
+	return dyn.InvalidValue, fmt.Errorf("unexpected kind %s at %s", left.Kind(), basePath.String())
 }
 
 func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dyn.Mapping, visitor OverrideVisitor) (dyn.Mapping, error) {
|
|||
{
|
||||
name: "nil (not updated)",
|
||||
state: visitorState{},
|
||||
left: dyn.NilValue,
|
||||
right: dyn.NilValue,
|
||||
expected: dyn.NilValue,
|
||||
left: dyn.NilValue.WithLocation(leftLocation),
|
||||
right: dyn.NilValue.WithLocation(rightLocation),
|
||||
expected: dyn.NilValue.WithLocation(leftLocation),
|
||||
},
|
||||
{
|
||||
name: "nil (updated)",
|
||||
|
|