Added support for double underscore variable references (#2203)

## Changes
Added support for double underscore variable references.

Previously we made this restriction stricter for no particular reason. The
TF provider supports multiple consecutive underscores, so DABs should
support them as well.

Fixes #1753

## Tests
Added acceptance and integration tests
This commit is contained in:
Andrew Nester 2025-01-29 14:38:28 +01:00 committed by GitHub
parent 59d6fbfee9
commit ec7808da34
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 138 additions and 6 deletions

View File

@ -0,0 +1,14 @@
# Acceptance-test fixture: declares a bundle variable whose name contains a
# double underscore and references it from a job task, verifying that
# ${var.double__underscore} interpolates correctly.
bundle:
  name: double_underscore

variables:
  double__underscore:
    description: "This is a variable with a double underscore"
    default: "default"

resources:
  jobs:
    test_job:
      name: "test"
      tasks:
        - task_key: "test ${var.double__underscore}"

View File

@ -0,0 +1,7 @@
>>> $CLI bundle validate -o json
[
{
"task_key": "test default"
}
]

View File

@ -0,0 +1 @@
trace $CLI bundle validate -o json | jq .resources.jobs.test_job.tasks

View File

@ -6,7 +6,9 @@ import (
"testing"
"github.com/databricks/cli/integration/internal/acc"
"github.com/databricks/cli/internal/testcli"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/testdiff"
"github.com/google/uuid"
"github.com/stretchr/testify/require"
)
@ -35,3 +37,40 @@ func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {
// deploy empty bundle again
deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
}
// TestBasicBundleDeployWithDoubleUnderscoreVariables validates and deploys a
// bundle whose variable and resource names contain double underscores, and
// compares the CLI output against golden files.
func TestBasicBundleDeployWithDoubleUnderscoreVariables(t *testing.T) {
	ctx, wt := acc.WorkspaceTest(t)

	// Template parameters for the bundle under test.
	nodeType := testutil.GetCloud(t).NodeTypeID()
	unique := uuid.New().String()
	root := initTestTemplate(t, ctx, "basic_with_variables", map[string]any{
		"unique_id":     unique,
		"node_type_id":  nodeType,
		"spark_version": defaultSparkVersion,
	})

	me, err := wt.W.CurrentUser.Me(ctx)
	require.NoError(t, err)

	// Normalize run-specific values so the output matches the golden files.
	ctx, repl := testdiff.WithReplacementsMap(ctx)
	repl.Set(unique, "$UNIQUE_PRJ")
	repl.Set(me.UserName, "$USERNAME")

	t.Cleanup(func() {
		destroyBundle(t, ctx, root)
	})

	testutil.Chdir(t, root)

	// Run validate, then deploy, checking each against its golden output.
	steps := []struct {
		args   []string
		golden string
	}{
		{[]string{"bundle", "validate"}, "testdata/basic_with_variables/bundle_validate.txt"},
		{[]string{"bundle", "deploy", "--force-lock", "--auto-approve"}, "testdata/basic_with_variables/bundle_deploy.txt"},
	}
	for _, step := range steps {
		testcli.AssertOutput(t, ctx, step.args, testutil.TestData(step.golden))
	}
}

View File

@ -0,0 +1,21 @@
{
"properties": {
"unique_id": {
"type": "string",
"description": "Unique ID for job name"
},
"spark_version": {
"type": "string",
"description": "Spark version used for job cluster"
},
"node_type_id": {
"type": "string",
"description": "Node type id for job cluster"
},
"root_path": {
"type": "string",
"description": "Root path to deploy bundle to",
"default": ""
}
}
}

View File

@ -0,0 +1,32 @@
# Integration-test template for the basic_with_variables bundle. Both a
# variable name (task__key) and a resource key (foo__bar) contain double
# underscores to exercise support for such names end-to-end, including the
# TF provider.
bundle:
  name: basic

workspace:
{{ if .root_path }}
  root_path: "{{.root_path}}/.bundle/{{.unique_id}}"
{{ else }}
  root_path: "~/.bundle/{{.unique_id}}"
{{ end }}

variables:
  task__key: # Note: the variable has double underscore
    default: my_notebook_task

resources:
  jobs:
    foo__bar: # Note: the resource has double underscore to check that TF provider can use such names
      name: test-job-basic-{{.unique_id}}
      tasks:
        - task_key: ${var.task__key}
          new_cluster:
            num_workers: 1
            spark_version: "{{.spark_version}}"
            node_type_id: "{{.node_type_id}}"
          spark_python_task:
            python_file: ./hello_world.py
    foo:
      name: test-job-basic-ref-{{.unique_id}}
      tasks:
        - task_key: job_task
          run_job_task:
            job_id: ${resources.jobs.foo__bar.id}

View File

@ -0,0 +1 @@
print("Hello World!")

View File

@ -0,0 +1,4 @@
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -0,0 +1,7 @@
Name: basic
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ
Validation OK!

View File

@ -1,12 +1,16 @@
package dynvar
import (
"fmt"
"regexp"
"github.com/databricks/cli/libs/dyn"
)
var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`)
var (
	// baseVarDef matches a single path segment: a leading letter followed by
	// alphanumeric runs, optionally separated by one or more '-' or '_'
	// characters (doubled separators such as "__" or "--" are allowed).
	baseVarDef = `[a-zA-Z]+([-_]*[a-zA-Z0-9]+)*`

	// re captures a ${...} variable reference: dot-separated segments, where
	// each trailing segment (and the reference as a whole) may carry one or
	// more [N] index suffixes.
	re = regexp.MustCompile(fmt.Sprintf(`\$\{(%[1]s(\.%[1]s(\[[0-9]+\])*)*(\[[0-9]+\])*)\}`, baseVarDef))
)
// ref represents a variable reference.
// It is a string [dyn.Value] contained in a larger [dyn.Value].

View File

@ -15,9 +15,13 @@ func TestNewRefNoString(t *testing.T) {
func TestNewRefValidPattern(t *testing.T) {
for in, refs := range map[string][]string{
"${hello_world.world_world}": {"hello_world.world_world"},
"${helloworld.world-world}": {"helloworld.world-world"},
"${hello-world.world-world}": {"hello-world.world-world"},
"${hello_world.world_world}": {"hello_world.world_world"},
"${helloworld.world-world}": {"helloworld.world-world"},
"${hello-world.world-world}": {"hello-world.world-world"},
"${hello_world.world__world}": {"hello_world.world__world"},
"${hello_world.world--world}": {"hello_world.world--world"},
"${hello_world.world-_world}": {"hello_world.world-_world"},
"${hello_world.world_-world}": {"hello_world.world_-world"},
} {
ref, ok := newRef(dyn.V(in))
require.True(t, ok, "should match valid pattern: %s", in)
@ -36,8 +40,6 @@ func TestNewRefInvalidPattern(t *testing.T) {
"${_-_._-_.id}", // cannot use _- in sequence
"${0helloworld.world-world}", // interpolated first section shouldn't start with number
"${helloworld.9world-world}", // interpolated second section shouldn't start with number
"${a-a.a-_a-a.id}", // fails because of -_ in the second segment
"${a-a.a--a-a.id}", // fails because of -- in the second segment
}
for _, v := range invalid {
_, ok := newRef(dyn.V(v))