From 5f4279160990b2fd4a4d522292e1ff832f307892 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 26 Jun 2024 12:25:32 +0200 Subject: [PATCH] Added support for complex variables (#1467) ## Changes Added support for complex variables Now it's possible to add and use complex variables as shown below ``` bundle: name: complex-variables resources: jobs: my_job: job_clusters: - job_cluster_key: key new_cluster: ${var.cluster} tasks: - task_key: test job_cluster_key: key variables: cluster: description: "A cluster definition" type: complex default: spark_version: "13.2.x-scala2.11" node_type_id: "Standard_DS3_v2" num_workers: 2 spark_conf: spark.speculation: true spark.databricks.delta.retentionDurationCheck.enabled: false ``` Fixes #1298 - [x] Support for complex variables - [x] Allow variable overrides (with shortcut) in targets - [x] Don't allow to provide complex variables via flag or env variable - [x] Fail validation if complex value is used but not `type: complex` provided - [x] Support using variables inside complex variables ## Tests Added unit tests --------- Co-authored-by: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> --- .../resolve_resource_references_test.go | 32 ++- .../mutator/resolve_variable_references.go | 55 +++++- .../resolve_variable_references_test.go | 185 +++++++++++++++++- bundle/config/mutator/set_variables.go | 8 +- bundle/config/mutator/set_variables_test.go | 47 +++-- bundle/config/root.go | 22 ++- bundle/config/root_test.go | 21 +- bundle/config/variable/variable.go | 39 +++- bundle/phases/initialize.go | 6 +- bundle/schema/schema_test.go | 96 ++++----- bundle/tests/complex_variables_test.go | 62 ++++++ bundle/tests/variables/complex/databricks.yml | 49 +++++ bundle/tests/variables_test.go | 10 +- libs/dyn/convert/from_typed.go | 22 ++- libs/dyn/convert/from_typed_test.go | 36 ++++ libs/dyn/convert/normalize.go | 27 +++ libs/dyn/convert/normalize_test.go | 140 +++++++++++++ libs/dyn/convert/to_typed.go | 25 +++ libs/dyn/convert/to_typed_test.go | 22 +++ libs/dyn/dynvar/ref.go | 2 +- libs/dyn/dynvar/resolve_test.go | 60 ++++++ 21 files changed, 853 insertions(+), 113 deletions(-) create mode 100644 bundle/tests/complex_variables_test.go create mode 100644 bundle/tests/variables/complex/databricks.yml diff --git a/bundle/config/mutator/resolve_resource_references_test.go b/bundle/config/mutator/resolve_resource_references_test.go index 214b712e..86a03b23 100644 --- a/bundle/config/mutator/resolve_resource_references_test.go +++ b/bundle/config/mutator/resolve_resource_references_test.go @@ -35,7 +35,7 @@ func TestResolveClusterReference(t *testing.T) { }, }, "some-variable": { - Value: &justString, + Value: justString, }, }, }, @@ -53,8 +53,8 @@ func TestResolveClusterReference(t *testing.T) { diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["my-cluster-id-1"].Value) - require.Equal(t, "9876-5432-xywz", *b.Config.Variables["my-cluster-id-2"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["my-cluster-id-1"].Value) + require.Equal(t, "9876-5432-xywz", b.Config.Variables["my-cluster-id-2"].Value) } func TestResolveNonExistentClusterReference(t *testing.T) { @@ -69,7 +69,7 @@ func TestResolveNonExistentClusterReference(t *testing.T) { }, }, "some-variable": { - Value: &justString, + Value: justString, }, }, }, @@ -105,7 +105,7 @@ func TestNoLookupIfVariableIsSet(t *testing.T) { diags := 
bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) - require.Equal(t, "random value", *b.Config.Variables["my-cluster-id"].Value) + require.Equal(t, "random value", b.Config.Variables["my-cluster-id"].Value) } func TestResolveServicePrincipal(t *testing.T) { @@ -132,14 +132,11 @@ func TestResolveServicePrincipal(t *testing.T) { diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) - require.Equal(t, "app-1234", *b.Config.Variables["my-sp"].Value) + require.Equal(t, "app-1234", b.Config.Variables["my-sp"].Value) } func TestResolveVariableReferencesInVariableLookups(t *testing.T) { - s := func(s string) *string { - return &s - } - + s := "bar" b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -147,7 +144,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) { }, Variables: map[string]*variable.Variable{ "foo": { - Value: s("bar"), + Value: s, }, "lookup": { Lookup: &variable.Lookup{ @@ -168,7 +165,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) { diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences())) require.NoError(t, diags.Error()) require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) } func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) { @@ -197,22 +194,15 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) { } func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) { - s := func(s string) *string { - return &s - } - b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ Target: "dev", }, Variables: map[string]*variable.Variable{ - "foo": { - Value: s("bar"), - }, "lookup": { Lookup: &variable.Lookup{ - Cluster: "cluster-${var.foo}-${bundle.target}", + Cluster: "cluster-${bundle.target}", }, }, }, @@ -227,5 +217,5 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) { diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())) require.NoError(t, diags.Error()) - require.Equal(t, "1234-5678-abcd", *b.Config.Variables["lookup"].Value) + require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value) } diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go index f7fce6c8..cddc85cb 100644 --- a/bundle/config/mutator/resolve_variable_references.go +++ b/bundle/config/mutator/resolve_variable_references.go @@ -17,6 +17,7 @@ type resolveVariableReferences struct { prefixes []string pattern dyn.Pattern lookupFn func(dyn.Value, dyn.Path) (dyn.Value, error) + skipFn func(dyn.Value) bool } func ResolveVariableReferences(prefixes ...string) bundle.Mutator { @@ -31,6 +32,18 @@ func ResolveVariableReferencesInLookup() bundle.Mutator { }, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("lookup")), lookupFn: lookupForVariables} } +func ResolveVariableReferencesInComplexVariables() bundle.Mutator { + return &resolveVariableReferences{prefixes: []string{ + "bundle", + "workspace", + "variables", + }, + pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")), + lookupFn: lookupForComplexVariables, + skipFn: skipResolvingInNonComplexVariables, + } +} + 
func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) { // Future opportunity: if we lookup this path in both the given root // and the synthesized root, we know if it was explicitly set or implied to be empty. @@ -38,6 +51,34 @@ func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) { return dyn.GetByPath(v, path) } +func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) { + if path[0].Key() != "variables" { + return lookup(v, path) + } + + varV, err := dyn.GetByPath(v, path[:len(path)-1]) + if err != nil { + return dyn.InvalidValue, err + } + + var vv variable.Variable + err = convert.ToTyped(&vv, varV) + if err != nil { + return dyn.InvalidValue, err + } + + if vv.Type == variable.VariableTypeComplex { + return dyn.InvalidValue, fmt.Errorf("complex variables cannot contain references to another complex variables") + } + + return lookup(v, path) +} + +func skipResolvingInNonComplexVariables(v dyn.Value) bool { + _, ok := v.AsMap() + return !ok +} + func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) { if path[0].Key() != "variables" { return lookup(v, path) @@ -100,17 +141,27 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) // Resolve variable references in all values. return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) { // Rewrite the shorthand path ${var.foo} into ${variables.foo.value}. - if path.HasPrefix(varPath) && len(path) == 2 { - path = dyn.NewPath( + if path.HasPrefix(varPath) { + newPath := dyn.NewPath( dyn.Key("variables"), path[1], dyn.Key("value"), ) + + if len(path) > 2 { + newPath = newPath.Append(path[2:]...) + } + + path = newPath } // Perform resolution only if the path starts with one of the specified prefixes. for _, prefix := range prefixes { if path.HasPrefix(prefix) { + // Skip resolution if there is a skip function and it returns true. 
+ if m.skipFn != nil && m.skipFn(v) { + return dyn.InvalidValue, dynvar.ErrSkipResolution + } return m.lookupFn(normalized, path) } } diff --git a/bundle/config/mutator/resolve_variable_references_test.go b/bundle/config/mutator/resolve_variable_references_test.go index 651ea3d2..2b88a249 100644 --- a/bundle/config/mutator/resolve_variable_references_test.go +++ b/bundle/config/mutator/resolve_variable_references_test.go @@ -43,10 +43,6 @@ func TestResolveVariableReferences(t *testing.T) { } func TestResolveVariableReferencesToBundleVariables(t *testing.T) { - s := func(s string) *string { - return &s - } - b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -57,7 +53,7 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) { }, Variables: map[string]*variable.Variable{ "foo": { - Value: s("bar"), + Value: "bar", }, }, }, @@ -195,3 +191,182 @@ func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) { assert.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.Autoscale.MaxWorkers) assert.Equal(t, 0.5, b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].NewCluster.AzureAttributes.SpotBidMaxPrice) } + +func TestResolveComplexVariable(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. + diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables")) + require.NoError(t, diags.Error()) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NumWorkers) +} + +func TestResolveComplexVariableReferencesToFields(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. 
+ diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0].new_cluster") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("node_type_id")), dyn.V("${var.cluster.node_type_id}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables")) + require.NoError(t, diags.Error()) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId) +} + +func TestResolveComplexVariableReferencesWithComplexVariablesError(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Name: "example", + }, + Variables: map[string]*variable.Variable{ + "cluster": { + Value: map[string]any{ + "node_type_id": "Standard_DS3_v2", + "num_workers": 2, + "spark_conf": "${var.spark_conf}", + }, + Type: variable.VariableTypeComplex, + }, + "spark_conf": { + Value: map[string]any{ + "spark.executor.memory": "4g", + "spark.executor.cores": "2", + }, + Type: variable.VariableTypeComplex, + }, + }, + + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + NewCluster: compute.ClusterSpec{ + NodeTypeId: "random", + }, + }, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + + // Assign the variables to the dynamic configuration. + diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + var p dyn.Path + var err error + + p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0]") + v, err = dyn.SetByPath(v, p.Append(dyn.Key("new_cluster")), dyn.V("${var.cluster}")) + require.NoError(t, err) + + return v, nil + }) + return diag.FromErr(err) + }) + require.NoError(t, diags.Error()) + + diags = bundle.Apply(ctx, b, bundle.Seq(ResolveVariableReferencesInComplexVariables(), ResolveVariableReferences("bundle", "workspace", "variables"))) + require.ErrorContains(t, diags.Error(), "complex variables cannot contain references to another complex variables") +} diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 0cee24ab..b3a9cf40 100644 --- a/bundle/config/mutator/set_variables.go +++ b/bundle/config/mutator/set_variables.go @@ -30,6 +30,10 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di // case: read and set variable value from process environment envVarName := bundleVarPrefix + name if val, ok := env.Lookup(ctx, envVarName); ok { + if v.IsComplex() { + return diag.Errorf(`setting via environment variables (%s) is not supported for complex variable %s`, envVarName, name) + } + err := v.Set(val) if err != nil { return diag.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %v`, val, name, envVarName, err) @@ -45,9 +49,9 @@ func setVariable(ctx context.Context, v *variable.Variable, name string) diag.Di // case: Set the variable to its default value if v.HasDefault() { - err := v.Set(*v.Default) + err := v.Set(v.Default) if err != nil { - return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, *v.Default, name, err) + 
return diag.Errorf(`failed to assign default value from config "%s" to variable %s with error: %v`, v.Default, name, err) } return nil } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index ae4f7989..65dedee9 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -15,7 +15,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { defaultVal := "default" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, + Default: defaultVal, } // set value for variable as an environment variable @@ -23,19 +23,19 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "process-env") + assert.Equal(t, variable.Value, "process-env") } func TestSetVariableUsingDefaultValue(t *testing.T) { defaultVal := "default" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, + Default: defaultVal, } diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "default") + assert.Equal(t, variable.Value, "default") } func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { @@ -43,15 +43,15 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { val := "assigned-value" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, - Value: &val, + Default: defaultVal, + Value: val, } // since a value is already assigned to the variable, it would not be overridden // by the default value diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "assigned-value") + assert.Equal(t, variable.Value, "assigned-value") } func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { @@ -59,8 +59,8 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { val := "assigned-value" variable := variable.Variable{ Description: "a test variable", - Default: &defaultVal, - Value: &val, + Default: defaultVal, + Value: val, } // set value for variable as an environment variable @@ -70,7 +70,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { // by the value from environment diags := setVariable(context.Background(), &variable, "foo") require.NoError(t, diags.Error()) - assert.Equal(t, *variable.Value, "assigned-value") + assert.Equal(t, variable.Value, "assigned-value") } func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) { @@ -92,15 +92,15 @@ func TestSetVariablesMutator(t *testing.T) { Variables: map[string]*variable.Variable{ "a": { Description: "resolved to default value", - Default: &defaultValForA, + Default: defaultValForA, }, "b": { Description: "resolved from environment vairables", - Default: &defaultValForB, + Default: defaultValForB, }, "c": { Description: "has already been assigned a value", - Value: &valForC, + Value: valForC, }, }, }, @@ -110,7 +110,22 @@ func TestSetVariablesMutator(t *testing.T) { diags := bundle.Apply(context.Background(), b, SetVariables()) require.NoError(t, diags.Error()) - assert.Equal(t, "default-a", *b.Config.Variables["a"].Value) - assert.Equal(t, "env-var-b", *b.Config.Variables["b"].Value) - assert.Equal(t, "assigned-val-c", *b.Config.Variables["c"].Value) + assert.Equal(t, "default-a", b.Config.Variables["a"].Value) + assert.Equal(t, 
"env-var-b", b.Config.Variables["b"].Value) + assert.Equal(t, "assigned-val-c", b.Config.Variables["c"].Value) +} + +func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) { + defaultVal := "default" + variable := variable.Variable{ + Description: "a test variable", + Default: defaultVal, + Type: variable.VariableTypeComplex, + } + + // set value for variable as an environment variable + t.Setenv("BUNDLE_VAR_foo", "process-env") + + diags := setVariable(context.Background(), &variable, "foo") + assert.ErrorContains(t, diags.Error(), "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo") } diff --git a/bundle/config/root.go b/bundle/config/root.go index 2ce3a138..0def1167 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -267,6 +267,11 @@ func (r *Root) InitializeVariables(vars []string) error { if _, ok := r.Variables[name]; !ok { return fmt.Errorf("variable %s has not been defined", name) } + + if r.Variables[name].IsComplex() { + return fmt.Errorf("setting variables of complex type via --var flag is not supported: %s", name) + } + err := r.Variables[name].Set(val) if err != nil { return fmt.Errorf("failed to assign %s to %s: %s", val, name, err) @@ -419,7 +424,7 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) { } // For each variable, normalize its contents if it is a single string. - return dyn.Map(target, "variables", dyn.Foreach(func(_ dyn.Path, variable dyn.Value) (dyn.Value, error) { + return dyn.Map(target, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) { switch variable.Kind() { case dyn.KindString, dyn.KindBool, dyn.KindFloat, dyn.KindInt: @@ -430,6 +435,21 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) { "default": variable, }, variable.Location()), nil + case dyn.KindMap, dyn.KindSequence: + // Check if the original definition of variable has a type field. 
+ typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type"))) + if err != nil { + return variable, nil + } + + if typeV.MustString() == "complex" { + return dyn.NewValue(map[string]dyn.Value{ + "default": variable, + }, variable.Location()), nil + } + + return variable, nil + default: return variable, nil } diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index b5676884..27cc3d22 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -51,7 +51,7 @@ func TestInitializeVariables(t *testing.T) { root := &Root{ Variables: map[string]*variable.Variable{ "foo": { - Default: &fooDefault, + Default: fooDefault, Description: "an optional variable since default is defined", }, "bar": { @@ -62,8 +62,8 @@ func TestInitializeVariables(t *testing.T) { err := root.InitializeVariables([]string{"foo=123", "bar=456"}) assert.NoError(t, err) - assert.Equal(t, "123", *(root.Variables["foo"].Value)) - assert.Equal(t, "456", *(root.Variables["bar"].Value)) + assert.Equal(t, "123", (root.Variables["foo"].Value)) + assert.Equal(t, "456", (root.Variables["bar"].Value)) } func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) { @@ -77,7 +77,7 @@ func TestInitializeVariablesWithAnEqualSignInValue(t *testing.T) { err := root.InitializeVariables([]string{"foo=123=567"}) assert.NoError(t, err) - assert.Equal(t, "123=567", *(root.Variables["foo"].Value)) + assert.Equal(t, "123=567", (root.Variables["foo"].Value)) } func TestInitializeVariablesInvalidFormat(t *testing.T) { @@ -119,3 +119,16 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) { require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, Development, root.Bundle.Mode) } + +func TestInitializeComplexVariablesViaFlagIsNotAllowed(t *testing.T) { + root := &Root{ + Variables: map[string]*variable.Variable{ + "foo": { + Type: variable.VariableTypeComplex, + }, + }, + } + + err := root.InitializeVariables([]string{"foo=123"}) + assert.ErrorContains(t, err, "setting variables of complex type via --var flag is not supported: foo") +} diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go index 5e700a9b..ba94f9c8 100644 --- a/bundle/config/variable/variable.go +++ b/bundle/config/variable/variable.go @@ -2,12 +2,27 @@ package variable import ( "fmt" + "reflect" +) + +// We are using `any` because since introduction of complex variables, +// variables can be of any type. +// Type alias is used to make it easier to understand the code. +type VariableValue = any + +type VariableType string + +const ( + VariableTypeComplex VariableType = "complex" ) // An input variable for the bundle config type Variable struct { + // A type of the variable. This is used to validate the value of the variable + Type VariableType `json:"type,omitempty"` + // A default value which then makes the variable optional - Default *string `json:"default,omitempty"` + Default VariableValue `json:"default,omitempty"` // Documentation for this input variable Description string `json:"description,omitempty"` @@ -21,7 +36,7 @@ type Variable struct { // 4. Default value defined in variable definition // 5. Throw error, since if no default value is defined, then the variable // is required - Value *string `json:"value,omitempty" bundle:"readonly"` + Value VariableValue `json:"value,omitempty" bundle:"readonly"` // The value of this field will be used to lookup the resource by name // And assign the value of the variable to ID of the resource found. 
@@ -39,10 +54,24 @@ func (v *Variable) HasValue() bool { return v.Value != nil } -func (v *Variable) Set(val string) error { +func (v *Variable) Set(val VariableValue) error { if v.HasValue() { - return fmt.Errorf("variable has already been assigned value: %s", *v.Value) + return fmt.Errorf("variable has already been assigned value: %s", v.Value) } - v.Value = &val + + rv := reflect.ValueOf(val) + switch rv.Kind() { + case reflect.Struct, reflect.Array, reflect.Slice, reflect.Map: + if v.Type != VariableTypeComplex { + return fmt.Errorf("variable type is not complex") + } + } + + v.Value = val + return nil } + +func (v *Variable) IsComplex() bool { + return v.Type == VariableTypeComplex +} diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index d96ee0eb..79fca9df 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -29,11 +29,13 @@ func Initialize() bundle.Mutator { mutator.ExpandWorkspaceRoot(), mutator.DefineDefaultWorkspacePaths(), mutator.SetVariables(), - // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences - // and ResolveVariableReferences. See what is expected in PythonMutatorPhaseInit doc + // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences, + // ResolveVariableReferencesInComplexVariables and ResolveVariableReferences. + // See what is expected in PythonMutatorPhaseInit doc pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit), mutator.ResolveVariableReferencesInLookup(), mutator.ResolveResourceReferences(), + mutator.ResolveVariableReferencesInComplexVariables(), mutator.ResolveVariableReferences( "bundle", "workspace", diff --git a/bundle/schema/schema_test.go b/bundle/schema/schema_test.go index ea4fd102..6d9df0cc 100644 --- a/bundle/schema/schema_test.go +++ b/bundle/schema/schema_test.go @@ -20,7 +20,7 @@ func TestIntSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }` @@ -47,7 +47,7 @@ func TestBooleanSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }` @@ -123,7 +123,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -134,7 +134,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -145,7 +145,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -156,7 +156,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -167,7 +167,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -178,7 +178,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -189,7 +189,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -200,7 +200,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -214,7 +214,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -225,7 +225,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -236,7 +236,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -247,7 +247,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -258,7 +258,7 @@ func TestStructOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -326,7 +326,7 @@ func TestStructOfStructsSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -391,7 +391,7 @@ func TestStructOfMapsSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -481,7 +481,7 @@ func TestMapOfPrimitivesSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -518,7 +518,7 @@ func TestMapOfStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -556,7 +556,7 @@ func TestMapOfMapSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -661,7 +661,7 @@ func TestSliceOfMapSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -699,7 +699,7 @@ func TestSliceOfStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -757,7 +757,7 @@ func TestEmbeddedStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -797,7 +797,7 @@ func TestEmbeddedStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -892,7 +892,7 @@ func TestNonAnnotatedFieldsAreSkipped(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -934,7 +934,7 @@ func TestDashFieldsAreSkipped(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -987,7 +987,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1004,7 +1004,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1018,7 +1018,7 @@ func TestPointerInStructSchema(t 
*testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1035,7 +1035,7 @@ func TestPointerInStructSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1106,7 +1106,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1129,7 +1129,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1157,7 +1157,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1180,7 +1180,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1210,7 +1210,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1236,7 +1236,7 @@ func TestGenericSchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1322,7 +1322,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1333,7 +1333,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1347,7 +1347,7 @@ func TestFieldsWithoutOmitEmptyAreRequired(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1429,7 +1429,7 @@ func TestDocIngestionForObject(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": 
"\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1512,7 +1512,7 @@ func TestDocIngestionForSlice(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1524,7 +1524,7 @@ func TestDocIngestionForSlice(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1611,7 +1611,7 @@ func TestDocIngestionForMap(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1623,7 +1623,7 @@ func TestDocIngestionForMap(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1683,7 +1683,7 @@ func TestDocIngestionForTopLevelPrimitive(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] } @@ -1761,7 +1761,7 @@ func TestInterfaceGeneratesEmptySchema(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1810,7 +1810,7 @@ func TestBundleReadOnlytag(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, @@ -1870,7 +1870,7 @@ func TestBundleInternalTag(t *testing.T) { }, { "type": "string", - "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\\}" + "pattern": "\\$\\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\\[[0-9]+\\])*)*(\\[[0-9]+\\])*)\\}" } ] }, diff --git a/bundle/tests/complex_variables_test.go b/bundle/tests/complex_variables_test.go new file mode 100644 index 00000000..ffe80e41 --- /dev/null +++ b/bundle/tests/complex_variables_test.go @@ -0,0 +1,62 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/stretchr/testify/require" +) + +func TestComplexVariables(t *testing.T) { + b, diags := loadTargetWithDiags("variables/complex", "default") + require.Empty(t, diags) + + diags = bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + mutator.ResolveVariableReferencesInComplexVariables(), + mutator.ResolveVariableReferences( + "variables", + ), + )) + require.NoError(t, diags.Error()) + + require.Equal(t, "13.2.x-scala2.11", 
b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion) + require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 2, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers) + require.Equal(t, "true", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"]) + + require.Equal(t, 3, len(b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries)) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Jar: "/path/to/jar", + }) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Egg: "/path/to/egg", + }) + require.Contains(t, b.Config.Resources.Jobs["my_job"].Tasks[0].Libraries, compute.Library{ + Whl: "/path/to/whl", + }) + + require.Equal(t, "task with spark version 13.2.x-scala2.11 and jar /path/to/jar", b.Config.Resources.Jobs["my_job"].Tasks[0].TaskKey) +} + +func TestComplexVariablesOverride(t *testing.T) { + b, diags := loadTargetWithDiags("variables/complex", "dev") + require.Empty(t, diags) + + diags = bundle.Apply(context.Background(), b, bundle.Seq( + mutator.SetVariables(), + mutator.ResolveVariableReferencesInComplexVariables(), + mutator.ResolveVariableReferences( + "variables", + ), + )) + require.NoError(t, diags.Error()) + + require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion) + require.Equal(t, "Standard_DS3_v3", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId) + require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers) + require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"]) +} diff --git a/bundle/tests/variables/complex/databricks.yml b/bundle/tests/variables/complex/databricks.yml new file mode 100644 index 00000000..f7535ad4 --- /dev/null +++ b/bundle/tests/variables/complex/databricks.yml @@ -0,0 +1,49 @@ +bundle: + name: complex-variables + +resources: + jobs: + my_job: + job_clusters: + - job_cluster_key: key + new_cluster: ${var.cluster} + tasks: + - task_key: test + job_cluster_key: key + libraries: ${variables.libraries.value} + task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}" + +variables: + node_type: + default: "Standard_DS3_v2" + cluster: + type: complex + description: "A cluster definition" + default: + spark_version: "13.2.x-scala2.11" + node_type_id: ${var.node_type} + num_workers: 2 + spark_conf: + spark.speculation: true + spark.databricks.delta.retentionDurationCheck.enabled: false + libraries: + type: complex + description: "A libraries definition" + default: + - jar: "/path/to/jar" + - egg: "/path/to/egg" + - whl: "/path/to/whl" + + +targets: + default: + dev: + variables: + node_type: "Standard_DS3_v3" + cluster: + spark_version: "14.2.x-scala2.11" + node_type_id: ${var.node_type} + num_workers: 4 + spark_conf: + spark.speculation: false + spark.databricks.delta.retentionDurationCheck.enabled: false diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 09441483..7cf0f72f 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -109,8 +109,8 @@ func TestVariablesWithoutDefinition(t *testing.T) { require.NoError(t, diags.Error()) require.True(t, b.Config.Variables["a"].HasValue()) require.True(t, b.Config.Variables["b"].HasValue()) - assert.Equal(t, "foo", 
*b.Config.Variables["a"].Value) - assert.Equal(t, "bar", *b.Config.Variables["b"].Value) + assert.Equal(t, "foo", b.Config.Variables["a"].Value) + assert.Equal(t, "bar", b.Config.Variables["b"].Value) } func TestVariablesWithTargetLookupOverrides(t *testing.T) { @@ -140,9 +140,9 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) { )) require.NoError(t, diags.Error()) - assert.Equal(t, "4321", *b.Config.Variables["d"].Value) - assert.Equal(t, "1234", *b.Config.Variables["e"].Value) - assert.Equal(t, "9876", *b.Config.Variables["f"].Value) + assert.Equal(t, "4321", b.Config.Variables["d"].Value) + assert.Equal(t, "1234", b.Config.Variables["e"].Value) + assert.Equal(t, "9876", b.Config.Variables["f"].Value) } func TestVariableTargetOverrides(t *testing.T) { diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index af49a07a..15c5b797 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -81,6 +81,11 @@ func fromTyped(src any, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptions) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindMap, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) @@ -100,8 +105,13 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio refv = dyn.NilValue } + var options []fromTypedOptions + if v.Kind() == reflect.Interface { + options = append(options, includeZeroValues) + } + // Convert the field taking into account the reference value (may be equal to config.NilValue). - nv, err := fromTyped(v.Interface(), refv) + nv, err := fromTyped(v.Interface(), refv, options...) if err != nil { return dyn.InvalidValue, err } @@ -127,6 +137,11 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindMap, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) @@ -170,6 +185,11 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { func fromTypedSlice(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Check that the reference value is compatible or nil. switch ref.Kind() { + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). 
+ if dynvar.IsPureVariableReference(ref.MustString()) { + return ref, nil + } case dyn.KindSequence, dyn.KindNil: default: return dyn.InvalidValue, fmt.Errorf("unhandled type: %s", ref.Kind()) diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index e5447fe8..ed0c11ca 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -662,6 +662,42 @@ func TestFromTypedFloatTypeError(t *testing.T) { require.Error(t, err) } +func TestFromTypedAny(t *testing.T) { + type Tmp struct { + Foo any `json:"foo"` + Bar any `json:"bar"` + Foz any `json:"foz"` + Baz any `json:"baz"` + } + + src := Tmp{ + Foo: "foo", + Bar: false, + Foz: 0, + Baz: map[string]any{ + "foo": "foo", + "bar": 1234, + "qux": 0, + "nil": nil, + }, + } + + ref := dyn.NilValue + nv, err := FromTyped(src, ref) + require.NoError(t, err) + assert.Equal(t, dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V(false), + "foz": dyn.V(int64(0)), + "baz": dyn.V(map[string]dyn.Value{ + "foo": dyn.V("foo"), + "bar": dyn.V(int64(1234)), + "qux": dyn.V(int64(0)), + "nil": dyn.V(nil), + }), + }), nv) +} + func TestFromTypedAnyNil(t *testing.T) { var src any = nil var ref = dyn.NilValue diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 35d4d821..ad82e20e 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -56,6 +56,8 @@ func (n normalizeOptions) normalizeType(typ reflect.Type, src dyn.Value, seen [] return n.normalizeInt(typ, src, path) case reflect.Float32, reflect.Float64: return n.normalizeFloat(typ, src, path) + case reflect.Interface: + return n.normalizeInterface(typ, src, path) } return dyn.InvalidValue, diag.Errorf("unsupported type: %s", typ.Kind()) @@ -166,8 +168,15 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a struct. return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path)) } @@ -197,8 +206,15 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a map. return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindMap, src, path)) } @@ -225,8 +241,15 @@ func (n normalizeOptions) normalizeSlice(typ reflect.Type, src dyn.Value, seen [ return dyn.NewValue(out, src.Location()), diags case dyn.KindNil: return src, diags + + case dyn.KindString: + // Return verbatim if it's a pure variable reference. + if dynvar.IsPureVariableReference(src.MustString()) { + return src, nil + } } + // Cannot interpret as a slice. 
return dyn.InvalidValue, diags.Append(typeMismatch(dyn.KindSequence, src, path)) } @@ -371,3 +394,7 @@ func (n normalizeOptions) normalizeFloat(typ reflect.Type, src dyn.Value, path d return dyn.NewValue(out, src.Location()), diags } + +func (n normalizeOptions) normalizeInterface(typ reflect.Type, src dyn.Value, path dyn.Path) (dyn.Value, diag.Diagnostics) { + return src, nil +} diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index 843b4ea5..299ffcab 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -223,6 +223,52 @@ func TestNormalizeStructIncludeMissingFieldsOnRecursiveType(t *testing.T) { }), vout) } +func TestNormalizeStructVariableReference(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeStructRandomStringError(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeStructIntError(t *testing.T) { + type Tmp struct { + Foo string `json:"foo"` + } + + var typ Tmp + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeMap(t *testing.T) { var typ map[string]string vin := dyn.V(map[string]dyn.Value{ @@ -312,6 +358,40 @@ func TestNormalizeMapNestedError(t *testing.T) { ) } +func TestNormalizeMapVariableReference(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeMapRandomStringError(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue("var foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeMapIntError(t *testing.T) { + var typ map[string]string + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected map, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeSlice(t *testing.T) { var typ []string vin := dyn.V([]dyn.Value{ @@ -400,6 +480,40 @@ func TestNormalizeSliceNestedError(t *testing.T) { ) } +func TestNormalizeSliceVariableReference(t *testing.T) { + var typ []string + vin := dyn.NewValue("${var.foo}", dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(typ, vin) + assert.Empty(t, err) + assert.Equal(t, vin, vout) +} + +func TestNormalizeSliceRandomStringError(t *testing.T) { + var typ []string + vin := dyn.NewValue("var 
foo", dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected sequence, found string`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + +func TestNormalizeSliceIntError(t *testing.T) { + var typ []string + vin := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + _, err := Normalize(typ, vin) + assert.Len(t, err, 1) + assert.Equal(t, diag.Diagnostic{ + Severity: diag.Warning, + Summary: `expected sequence, found int`, + Location: vin.Location(), + Path: dyn.EmptyPath, + }, err[0]) +} + func TestNormalizeString(t *testing.T) { var typ string vin := dyn.V("string") @@ -725,3 +839,29 @@ func TestNormalizeAnchors(t *testing.T) { "foo": "bar", }, vout.AsAny()) } + +func TestNormalizeBoolToAny(t *testing.T) { + var typ any + vin := dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue(false, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} + +func TestNormalizeIntToAny(t *testing.T) { + var typ any + vin := dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue(10, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} + +func TestNormalizeSliceToAny(t *testing.T) { + var typ any + v1 := dyn.NewValue(1, dyn.Location{File: "file", Line: 1, Column: 1}) + v2 := dyn.NewValue(2, dyn.Location{File: "file", Line: 1, Column: 1}) + vin := dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1}) + vout, err := Normalize(&typ, vin) + assert.Len(t, err, 0) + assert.Equal(t, dyn.NewValue([]dyn.Value{v1, v2}, dyn.Location{File: "file", Line: 1, Column: 1}), vout) +} diff --git a/libs/dyn/convert/to_typed.go b/libs/dyn/convert/to_typed.go index f10853a2..91d6445a 100644 --- a/libs/dyn/convert/to_typed.go +++ b/libs/dyn/convert/to_typed.go @@ -46,6 +46,8 @@ func ToTyped(dst any, src dyn.Value) error { return toTypedInt(dstv, src) case reflect.Float32, reflect.Float64: return toTypedFloat(dstv, src) + case reflect.Interface: + return toTypedInterface(dstv, src) } return fmt.Errorf("unsupported type: %s", dstv.Kind()) @@ -101,6 +103,12 @@ func toTypedStruct(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -132,6 +140,12 @@ func toTypedMap(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). + if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -157,6 +171,12 @@ func toTypedSlice(dst reflect.Value, src dyn.Value) error { case dyn.KindNil: dst.SetZero() return nil + case dyn.KindString: + // Ignore pure variable references (e.g. ${var.foo}). 
+ if dynvar.IsPureVariableReference(src.MustString()) { + dst.SetZero() + return nil + } } return TypeError{ @@ -260,3 +280,8 @@ func toTypedFloat(dst reflect.Value, src dyn.Value) error { msg: fmt.Sprintf("expected a float, found a %s", src.Kind()), } } + +func toTypedInterface(dst reflect.Value, src dyn.Value) error { + dst.Set(reflect.ValueOf(src.AsAny())) + return nil +} diff --git a/libs/dyn/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go index 56d98a3c..5e37f286 100644 --- a/libs/dyn/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -511,3 +511,25 @@ func TestToTypedWithAliasKeyType(t *testing.T) { assert.Equal(t, "bar", out["foo"]) assert.Equal(t, "baz", out["bar"]) } + +func TestToTypedAnyWithBool(t *testing.T) { + var out any + err := ToTyped(&out, dyn.V(false)) + require.NoError(t, err) + assert.Equal(t, false, out) + + err = ToTyped(&out, dyn.V(true)) + require.NoError(t, err) + assert.Equal(t, true, out) +} + +func TestToTypedAnyWithMap(t *testing.T) { + var out any + v := dyn.V(map[string]dyn.Value{ + "foo": dyn.V("bar"), + "bar": dyn.V("baz"), + }) + err := ToTyped(&out, v) + require.NoError(t, err) + assert.Equal(t, map[string]any{"foo": "bar", "bar": "baz"}, out) +} diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go index e6340269..bf160fa8 100644 --- a/libs/dyn/dynvar/ref.go +++ b/libs/dyn/dynvar/ref.go @@ -6,7 +6,7 @@ import ( "github.com/databricks/cli/libs/dyn" ) -const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}` +const VariableRegex = `\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}` var re = regexp.MustCompile(VariableRegex) diff --git a/libs/dyn/dynvar/resolve_test.go b/libs/dyn/dynvar/resolve_test.go index bbecbb77..498322a4 100644 --- a/libs/dyn/dynvar/resolve_test.go +++ b/libs/dyn/dynvar/resolve_test.go @@ -247,3 +247,63 @@ func TestResolveWithInterpolateAliasedRef(t *testing.T) { assert.Equal(t, "a", getByPath(t, out, "b").MustString()) assert.Equal(t, "a", getByPath(t, out, "c").MustString()) } + +func TestResolveIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{dyn.V("a"), dyn.V("b")}), + "a": dyn.V("a: ${slice[0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveIndexedRefsFromMap(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "map": dyn.V( + map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{dyn.V("a")}), + }), + "a": dyn.V("a: ${map.slice[0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveMapFieldFromIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "map": dyn.V( + map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{ + dyn.V(map[string]dyn.Value{ + "value": dyn.V("a"), + }), + }), + }), + "a": dyn.V("a: ${map.slice[0].value}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", getByPath(t, out, "a").MustString()) +} + +func TestResolveNestedIndexedRefs(t *testing.T) { + in := dyn.V(map[string]dyn.Value{ + "slice": dyn.V([]dyn.Value{ + dyn.V([]dyn.Value{dyn.V("a")}), + }), + "a": dyn.V("a: ${slice[0][0]}"), + }) + + out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in)) + require.NoError(t, err) + + assert.Equal(t, "a: a", 
getByPath(t, out, "a").MustString()) +}
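
Below is a minimal, illustrative sketch (not part of the patch) of how the reworked `Variable` API behaves after this change. It assumes the import path `github.com/databricks/cli/bundle/config/variable` implied by the file layout above. `Set` now accepts any value, but rejects maps, slices, and structs unless the variable is declared with `type: complex`:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/bundle/config/variable"
)

func main() {
	// A complex variable: Type must be set to "complex" before a
	// map/slice/struct value can be assigned to it.
	v := variable.Variable{
		Description: "a cluster definition",
		Type:        variable.VariableTypeComplex,
	}
	err := v.Set(map[string]any{
		"spark_version": "13.2.x-scala2.11",
		"num_workers":   2,
	})
	fmt.Println(err, v.IsComplex(), v.HasValue()) // <nil> true true

	// Without Type set to "complex", assigning a map fails.
	w := variable.Variable{Description: "missing type"}
	err = w.Set(map[string]any{"foo": "bar"})
	fmt.Println(err) // variable type is not complex
}
```

A second sketch, also illustrative only, shows the extended `VariableRegex` in action: indexed references such as `${libs[0].jar}` are now matched and resolved by `dynvar.Resolve`, mirroring the new `TestResolveIndexedRefs` tests. The keys `libs` and `task_key` are made up for the example:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynvar"
)

func main() {
	// A value tree containing a sequence and a string that references
	// an element of that sequence by index.
	in := dyn.V(map[string]dyn.Value{
		"libs": dyn.V([]dyn.Value{
			dyn.V(map[string]dyn.Value{"jar": dyn.V("/path/to/jar")}),
		}),
		"task_key": dyn.V("task with jar ${libs[0].jar}"),
	})

	// Resolve references against the same tree.
	out, err := dynvar.Resolve(in, dynvar.DefaultLookup(in))
	if err != nil {
		panic(err)
	}

	res, err := dyn.GetByPath(out, dyn.MustPathFromString("task_key"))
	if err != nil {
		panic(err)
	}
	fmt.Println(res.MustString()) // task with jar /path/to/jar
}
```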