acc: Use [VARNAME] instead of $VARNAME (#2282)

$VARNAME is what we use for environment variables, so it's good to
keep the two separate.

Some people use envsubst for homemade variable interpolation, so it's
good to have separation there as well.
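
A quick way to see the collision (a minimal sketch using Go's standard
os.ExpandEnv as a stand-in for envsubst-style expansion; not the
acceptance-test code itself):

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        // Shell-style expanders (envsubst, os.ExpandEnv, ...) treat $CLI as an
        // environment variable reference and substitute it...
        os.Setenv("CLI", "/usr/local/bin/databricks")
        fmt.Println(os.ExpandEnv(">>> $CLI bundle validate"))
        // Output: >>> /usr/local/bin/databricks bundle validate

        // ...while a bracketed placeholder means nothing to them and passes
        // through intact, so [CLI] in test output cannot be clobbered.
        fmt.Println(os.ExpandEnv(">>> [CLI] bundle validate"))
        // Output: >>> [CLI] bundle validate
    }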
Denis Bilenko 2025-02-03 15:10:19 +01:00 committed by GitHub
parent 838de2fde2
commit 9320bd1682
79 changed files with 324 additions and 329 deletions

View File

@@ -102,13 +102,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("CLI", execPath)
-repls.SetPath(execPath, "$CLI")
+repls.SetPath(execPath, "[CLI]")
// Make helper scripts available
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
tempHomeDir := t.TempDir()
-repls.SetPath(tempHomeDir, "$TMPHOME")
+repls.SetPath(tempHomeDir, "[TMPHOME]")
t.Logf("$TMPHOME=%v", tempHomeDir)
// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
@@ -133,7 +133,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
-repls.SetPath(terraformrcPath, "$DATABRICKS_TF_CLI_CONFIG_FILE")
+repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
terraformExecPath := filepath.Join(buildDir, "terraform")
if runtime.GOOS == "windows" {
@@ -141,10 +141,10 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
t.Setenv("TERRAFORM", terraformExecPath)
-repls.SetPath(terraformExecPath, "$TERRAFORM")
+repls.SetPath(terraformExecPath, "[TERRAFORM]")
// do it last so that full paths match first:
-repls.SetPath(buildDir, "$BUILD_DIR")
+repls.SetPath(buildDir, "[BUILD_DIR]")
workspaceClient, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
@@ -226,7 +226,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
tmpDir = t.TempDir()
}
-repls.SetPathWithParents(tmpDir, "$TMPDIR")
+repls.SetPathWithParents(tmpDir, "[TMPDIR]")
repls.Repls = append(repls.Repls, config.Repls...)
scriptContents := readMergedScriptContents(t, dir)
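
For orientation: the replacement table that repls.SetPath populates boils down
to ordered (path, placeholder) pairs applied to captured test output, which is
why the build directory is registered last — longer, more specific paths must
be replaced first. A hypothetical sketch of the technique (not the actual
testdiff package):

    package main

    import (
        "fmt"
        "strings"
    )

    type repl struct{ old, new string }

    // normalize applies replacements in registration order, so earlier,
    // more specific entries win over broad prefixes.
    func normalize(out string, repls []repl) string {
        for _, r := range repls {
            out = strings.ReplaceAll(out, r.old, r.new)
        }
        return out
    }

    func main() {
        repls := []repl{
            {"/tmp/build/terraform", "[TERRAFORM]"}, // full path first
            {"/tmp/build", "[BUILD_DIR]"},           // bare prefix last
        }
        fmt.Println(normalize("exec /tmp/build/terraform from /tmp/build", repls))
        // Output: exec [TERRAFORM] from [BUILD_DIR]
    }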

View File

@@ -2,20 +2,20 @@
>>> chmod 000 .git
->>> $CLI bundle validate
+>>> [CLI] bundle validate
Error: unable to load repository specific gitconfig: open config: permission denied
Name: git-permerror
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default
Found 1 error
Exit code: 1
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -24,7 +24,7 @@ Exit code: 1
"bundle_root_path": "."
}
->>> withdir subdir/a/b $CLI bundle validate -o json
+>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -38,12 +38,12 @@ Exit code: 1
>>> chmod 000 .git/HEAD
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
->>> withdir subdir/a/b $CLI bundle validate -o json
+>>> withdir subdir/a/b [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
@@ -53,7 +53,7 @@ Exit code: 1
>>> chmod 000 .git/config
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -62,7 +62,7 @@ Exit code: 1
"bundle_root_path": "."
}
->>> withdir subdir/a/b $CLI bundle validate -o json
+>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle deploy --help
+>>> [CLI] bundle deploy --help
Deploy bundle
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle deployment --help
+>>> [CLI] bundle deployment --help
Deployment related commands
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle destroy --help
+>>> [CLI] bundle destroy --help
Destroy deployed bundle resources
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle generate dashboard --help
+>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle generate job --help
+>>> [CLI] bundle generate job --help
Generate bundle configuration for a job
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle generate pipeline --help
+>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle generate --help
+>>> [CLI] bundle generate --help
Generate bundle configuration
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle init --help
+>>> [CLI] bundle init --help
Initialize using a bundle template.
TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle open --help
+>>> [CLI] bundle open --help
Open a resource in the browser
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle run --help
+>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.
The KEY is the unique identifier of the resource to run. In addition to

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle schema --help
+>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle summary --help
+>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle sync --help
+>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate --help
+>>> [CLI] bundle validate --help
Validate configuration
Usage:

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle --help
+>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.
Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json -t default
+>>> [CLI] bundle validate -o json -t default
{
"autoscale": {
"max_workers": 7,
@@ -15,7 +15,7 @@
"spark_version": "13.3.x-scala2.12"
}
->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
"autoscale": {
"max_workers": 3,

View File

@@ -1,10 +1,10 @@
->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -27,12 +27,12 @@
}
}
->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@@ -1,10 +1,10 @@
->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -27,21 +27,21 @@
}
}
->>> $CLI bundle validate -t development
+>>> [CLI] bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development
Validation OK!
->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -64,11 +64,11 @@ Validation OK!
}
}
->>> $CLI bundle validate -t staging
+>>> [CLI] bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging
Validation OK!

View File

@@ -1,5 +1,5 @@
->>> errcode $CLI bundle validate -o json -t development
+>>> errcode [CLI] bundle validate -o json -t development
Error: file ./test1.py not found

View File

@@ -28,7 +28,7 @@
]
}
->>> errcode $CLI bundle validate -o json -t staging
+>>> errcode [CLI] bundle validate -o json -t staging
Error: file ./test1.py not found
@@ -63,14 +63,14 @@ Exit code: 1
]
}
->>> errcode $CLI bundle validate -t staging
+>>> errcode [CLI] bundle validate -t staging
Error: file ./test1.py not found
Name: override_job_tasks
Target: staging
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_tasks/staging
Found 1 error

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json -t dev
+>>> [CLI] bundle validate -o json -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@@ -13,7 +13,7 @@ Warning: expected map, found string
}
}
->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@@ -21,7 +21,7 @@ Warning: expected map, found string
Name: merge-string-map
Target: dev
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/merge-string-map/dev
Found 1 warning

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
"foo": {
"clusters": [
@@ -14,14 +14,14 @@
],
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/development/state/metadata.json"
},
"name": "job",
"permissions": []
}
}
->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"clusters": [
@@ -36,7 +36,7 @@
],
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/staging/state/metadata.json"
},
"name": "job",
"permissions": []

View File

@@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
-"project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
+"project_directory": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},

View File

@@ -1,22 +1,22 @@
[
{
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file1.py"
}
},
{
"notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook1"
}
},
{
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file2.py"
}
},
{
"notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook2"
}
}
]

View File

@@ -1,15 +1,15 @@
->>> $CLI bundle validate -t development -o json
->>> $CLI bundle validate -t error
+>>> [CLI] bundle validate -t development -o json
+>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: fallback
Target: error
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/fallback/error
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/fallback/error
Found 1 error

View File

@@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
-"project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
+"project_directory": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
@@ -68,7 +68,7 @@
"for_each_task": {
"task": {
"notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
}
}
},
@@ -80,7 +80,7 @@
"task": {
"job_cluster_key": "default",
"spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
}
}
},

View File

@@ -1,22 +1,22 @@
[
{
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file1.py"
}
},
{
"notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook1"
}
},
{
"file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file2.py"
}
},
{
"notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook2"
}
}
]

View File

@@ -1,15 +1,15 @@
->>> $CLI bundle validate -t development -o json
->>> $CLI bundle validate -t error
+>>> [CLI] bundle validate -t development -o json
+>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: nominal
Target: error
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/nominal/error
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/nominal/error
Found 1 error

View File

@@ -1,4 +1,4 @@
->>> $CLI bundle validate -t default -o json
->>> $CLI bundle validate -t override -o json
+>>> [CLI] bundle validate -t default -o json
+>>> [CLI] bundle validate -t override -o json

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
"mode": "development",
"quality_monitors": {
@@ -21,7 +21,7 @@
}
}
->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
"mode": null,
"quality_monitors": {
@@ -46,7 +46,7 @@
}
}
->>> $CLI bundle validate -o json -t production
+>>> [CLI] bundle validate -o json -t production
{
"mode": null,
"quality_monitors": {

View File

@@ -1,5 +1,5 @@
->>> EXITCODE=0 errcode $CLI bundle validate
+>>> EXITCODE=0 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@@ -9,12 +9,12 @@ from myscript.py 0 postinit: hello stderr!
Name: scripts
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/scripts/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/scripts/default
Validation OK!
->>> EXITCODE=1 errcode $CLI bundle validate
+>>> EXITCODE=1 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 1 preinit: hello stdout!
from myscript.py 1 preinit: hello stderr!
@@ -26,7 +26,7 @@ Found 1 error
Exit code: 1
->>> EXITCODE=0 errcode $CLI bundle deploy
+>>> EXITCODE=0 errcode [CLI] bundle deploy
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@@ -42,7 +42,7 @@ from myscript.py 0 postbuild: hello stderr!
Executing 'predeploy' script
from myscript.py 0 predeploy: hello stdout!
from myscript.py 0 predeploy: hello stderr!
-Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME.
+Error: unable to deploy to /Workspace/Users/[USERNAME]/.bundle/scripts/default/state as [USERNAME].
Please make sure the current user or one of their groups is listed under the permissions of this bundle.
For assistance, contact the owners of this project.
They may need to redeploy the bundle to apply the new permissions.

View File

@@ -1,10 +1,10 @@
-Error: path "$TMPDIR" is not within repository root "$TMPDIR/myrepo"
+Error: path "[TMPDIR]" is not within repository root "[TMPDIR]/myrepo"
Name: test-bundle
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Found 1 error

View File

@@ -1,7 +1,7 @@
Name: test-bundle
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Validation OK!

View File

@@ -1,32 +1,32 @@
->>> $CLI bundle init dbt-sql --config-file ./input.json --output-dir output
+>>> [CLI] bundle init dbt-sql --config-file ./input.json --output-dir output
Welcome to the dbt template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
-workspace_host: $DATABRICKS_URL
+workspace_host: [DATABRICKS_URL]
📊 Your new project has been created in the 'my_dbt_sql' directory!
If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
Refer to the README.md file for full "getting started" guide and production setup instructions.
->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Name: my_dbt_sql
Target: dev
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/dev
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/dev
Validation OK!
->>> $CLI bundle validate -t prod
+>>> [CLI] bundle validate -t prod
Name: my_dbt_sql
Target: prod
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/prod
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/prod
Validation OK!

View File

@@ -19,16 +19,16 @@ targets:
# See also https://docs.databricks.com/dev-tools/bundles/deployment-modes.html.
mode: development
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
prod:
mode: production
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
-# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
-root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
+# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
+root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
-- user_name: $USERNAME
+- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
-user_name: $USERNAME
+user_name: [USERNAME]

View File

@@ -5,7 +5,7 @@ fixed:
type: databricks
prompts:
host:
-default: $DATABRICKS_HOST
+default: [DATABRICKS_HOST]
token:
hint: 'personal access token to use, dapiXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
hide_input: true
@@ -16,7 +16,7 @@ prompts:
hint: 'initial catalog'
default: main
schema:
-hint: 'personal schema where dbt will build objects during development, example: $USERNAME'
+hint: 'personal schema where dbt will build objects during development, example: [USERNAME]'
threads:
hint: 'threads to use during development, 1 or more'
type: 'int'

View File

@@ -11,7 +11,7 @@ resources:
email_notifications:
on_failure:
-- $USERNAME
+- [USERNAME]
tasks:

View File

@@ -1,30 +1,30 @@
->>> $CLI bundle init default-python --config-file ./input.json --output-dir output
+>>> [CLI] bundle init default-python --config-file ./input.json --output-dir output
Welcome to the default Python template for Databricks Asset Bundles!
-Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): $DATABRICKS_URL
+Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_python' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Name: my_default_python
Target: dev
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/dev
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/dev
Validation OK!
->>> $CLI bundle validate -t prod
+>>> [CLI] bundle validate -t prod
Name: my_default_python
Target: prod
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/prod
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/prod
Validation OK!

View File

@@ -16,16 +16,16 @@ targets:
mode: development
default: true
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
prod:
mode: production
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
-# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
-root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
+# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
+root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
-- user_name: $USERNAME
+- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
-user_name: $USERNAME
+user_name: [USERNAME]

View File

@@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
-- $USERNAME
+- [USERNAME]
tasks:
- task_key: notebook_task

View File

@@ -23,7 +23,7 @@ setup(
# to ensure that changes to wheel package are picked up when used on all-purpose clusters
version=my_default_python.__version__ + "+" + local_version,
url="https://databricks.com",
-author="$USERNAME",
+author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},

View File

@@ -1,32 +1,32 @@
->>> $CLI bundle init default-sql --config-file ./input.json --output-dir output
+>>> [CLI] bundle init default-sql --config-file ./input.json --output-dir output
Welcome to the default SQL template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
-workspace_host: $DATABRICKS_URL
+workspace_host: [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_sql' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Name: my_default_sql
Target: dev
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/dev
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/dev
Validation OK!
->>> $CLI bundle validate -t prod
+>>> [CLI] bundle validate -t prod
Name: my_default_sql
Target: prod
Workspace:
-Host: $DATABRICKS_URL
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/prod
+Host: [DATABRICKS_URL]
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/prod
Validation OK!

View File

@@ -25,7 +25,7 @@ targets:
mode: development
default: true
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
variables:
warehouse_id: f00dcafe
catalog: main
@@ -34,15 +34,15 @@ targets:
prod:
mode: production
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
-# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
-root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
+# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
+root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
variables:
warehouse_id: f00dcafe
catalog: main
schema: default
permissions:
-- user_name: $USERNAME
+- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
-user_name: $USERNAME
+user_name: [USERNAME]

View File

@@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
-- $USERNAME
+- [USERNAME]
parameters:
- name: catalog

View File

@@ -1,28 +1,28 @@
->>> $CLI bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
+>>> [CLI] bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
Welcome to (EXPERIMENTAL) "Jobs as code" template for Databricks Asset Bundles!
-Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): $DATABRICKS_URL
+Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_jobs_as_code' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
->>> $CLI bundle validate -t dev --output json
+>>> [CLI] bundle validate -t dev --output json
-Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION
+Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: [DEV_VERSION]
{
"jobs": {
"my_jobs_as_code_job": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"email_notifications": {
"on_failure": [
-"$USERNAME"
+"[USERNAME]"
]
},
"format": "MULTI_TASK",
@@ -40,19 +40,19 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
}
],
"max_concurrent_runs": 4,
-"name": "[dev $USERNAME] my_jobs_as_code_job",
+"name": "[dev [USERNAME]] my_jobs_as_code_job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {
-"dev": "$USERNAME"
+"dev": "[USERNAME]"
},
"tasks": [
{
"job_cluster_key": "job_cluster",
"notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/notebook"
},
"task_key": "notebook_task"
},

View File

@@ -34,16 +34,16 @@ targets:
mode: development
default: true
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
prod:
mode: production
workspace:
-host: $DATABRICKS_URL
+host: [DATABRICKS_URL]
-# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
-root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
+# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
+root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
-- user_name: $USERNAME
+- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
-user_name: $USERNAME
+user_name: [USERNAME]

View File

@@ -17,7 +17,7 @@ my_jobs_as_code_job = Job.from_dict(
},
"email_notifications": {
"on_failure": [
-"$USERNAME",
+"[USERNAME]",
],
},
"tasks": [

View File

@@ -1,4 +1,4 @@
-Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'...
+Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world [TMPDIR]_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '[TMPDIR]_GPARENT/world-123456'...
fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com

View File

@@ -1,5 +1,5 @@
->>> errcode $CLI bundle validate --var a=one -o json
+>>> errcode [CLI] bundle validate --var a=one -o json
{
"a": {
"default": "hello",
@@ -7,7 +7,7 @@
}
}
->>> errcode $CLI bundle validate --var a=one --var a=two
+>>> errcode [CLI] bundle validate --var a=one --var a=two
Error: failed to assign two to a: variable has already been assigned value: one
Name: arg-repeat

View File

@@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

View File

@@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

View File

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/default/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/dev/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@@ -1,10 +1,10 @@
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
->>> $CLI bundle validate -o json -t dev
+>>> [CLI] bundle validate -o json -t dev
>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"

View File

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
[
{
"task_key": "test default"

View File

@@ -3,8 +3,8 @@ Error: no value assigned to required variable a. Assignment can be done using "-
Name: empty${var.a}
Target: default
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/empty${var.a}/default
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/empty${var.a}/default
Found 1 error

View File

@@ -1,27 +1,27 @@
->>> $CLI bundle validate -t env-with-single-variable-override -o json
+>>> [CLI] bundle validate -t env-with-single-variable-override -o json
"default-a dev-b"
->>> $CLI bundle validate -t env-with-two-variable-overrides -o json
+>>> [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a prod-b"
->>> BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json
+>>> BUNDLE_VAR_b=env-var-b [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a env-var-b"
->>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
+>>> errcode [CLI] bundle validate -t env-missing-a-required-variable-assignment
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: test bundle
Target: env-missing-a-required-variable-assignment
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/test bundle/env-missing-a-required-variable-assignment
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/test bundle/env-missing-a-required-variable-assignment
Found 1 error
Exit code: 1
->>> errcode $CLI bundle validate -t env-using-an-undefined-variable
+>>> errcode [CLI] bundle validate -t env-using-an-undefined-variable
Error: variable c is not defined but is assigned a value
Name: test bundle
@@ -30,7 +30,7 @@ Found 1 error
Exit code: 1
->>> $CLI bundle validate -t env-overrides-lookup -o json
+>>> [CLI] bundle validate -t env-overrides-lookup -o json
{
"a": "default-a",
"b": "prod-b",

View File

@@ -1,6 +1,6 @@
=== variable file
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster",
"new_cluster": {
@@ -10,7 +10,7 @@
}
=== variable file and variable flag
->>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
+>>> [CLI] bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@@ -20,7 +20,7 @@
}
=== variable file and environment variable
->>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
+>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@@ -30,7 +30,7 @@
}
=== variable has value in config file
->>> $CLI bundle validate -o json --target with_value
+>>> [CLI] bundle validate -o json --target with_value
{
"job_cluster_key": "mlops_stacks-cluster-from-file",
"new_cluster": {
@@ -40,8 +40,8 @@
}
=== file cannot be parsed
->>> errcode $CLI bundle validate -o json --target invalid_json
+>>> errcode [CLI] bundle validate -o json --target invalid_json
-Error: failed to parse variables file $TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
+Error: failed to parse variables file [TMPDIR]/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
Exit code: 1
@@ -54,8 +54,8 @@ Exit code: 1
}
=== file has wrong structure
->>> errcode $CLI bundle validate -o json --target wrong_file_structure
+>>> errcode [CLI] bundle validate -o json --target wrong_file_structure
-Error: failed to parse variables file $TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
+Error: failed to parse variables file [TMPDIR]/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
Variables file must be a JSON object with the following format:
{"var1": "value1", "var2": "value2"}
@@ -71,7 +71,7 @@ Exit code: 1
}
=== file has variable that is complex but default is string
->>> errcode $CLI bundle validate -o json --target complex_to_string
+>>> errcode [CLI] bundle validate -o json --target complex_to_string
Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type
@@ -85,7 +85,7 @@ Exit code: 1
}
=== file has variable that is string but default is complex
->>> errcode $CLI bundle validate -o json --target string_to_complex
+>>> errcode [CLI] bundle validate -o json --target string_to_complex
Error: variable cluster is of type complex, but the value in the variable file is not a complex type
@@ -99,7 +99,7 @@ Exit code: 1
}
=== variable is required but it's not provided in the file
->>> errcode $CLI bundle validate -o json --target without_defaults
+>>> errcode [CLI] bundle validate -o json --target without_defaults
Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

View File

@@ -1,8 +1,4 @@
# Fix for windows
[[Repls]]
-Old = '\$TMPDIR\\.databricks\\bundle\\wrong_file_structure\\variable-overrides.json'
-New = '$$TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json'
-[[Repls]]
-Old = '\$TMPDIR\\.databricks\\bundle\\invalid_json\\variable-overrides.json'
-New = '$$TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json'
+Old = '\\'
+New = '/'
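
The test.toml simplification above is possible because the new placeholders
contain no $: the old rules had to spell out each escaped Windows path ($ is
special in the patterns, hence \$ and $$), whereas a single backslash-to-slash
rule now normalizes every [TMPDIR]-style path at once. A rough Go equivalent of
the new rule, assuming Old/New are regex pairs as the escaping suggests:

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // One Windows-normalization rule: every backslash becomes a slash.
        rule := regexp.MustCompile(`\\`)
        win := `[TMPDIR]\.databricks\bundle\invalid_json\variable-overrides.json`
        fmt.Println(rule.ReplaceAllString(win, "/"))
        // Output: [TMPDIR]/.databricks/bundle/invalid_json/variable-overrides.json
    }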

View File

@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json
+>>> [CLI] bundle validate -o json
{
"bundle": {
"environment": "prod",
@@ -11,7 +11,7 @@
"name": "git",
"target": "prod",
"terraform": {
-"exec_path": "$TERRAFORM"
+"exec_path": "[TERRAFORM]"
}
},
"sync": {
@@ -27,24 +27,24 @@
}
},
"workspace": {
-"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/artifacts",
-"file_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/files",
-"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/resources",
-"root_path": "/Workspace/Users/$USERNAME/.bundle/git/prod",
-"state_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/state"
+"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/artifacts",
+"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/files",
+"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/resources",
+"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod",
+"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/state"
}
}
->>> $CLI bundle validate
+>>> [CLI] bundle validate
Name: git
Target: prod
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/git/prod
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/git/prod
Validation OK!
->>> $CLI bundle validate -o json -t dev
+>>> [CLI] bundle validate -o json -t dev
{
"bundle": {
"environment": "dev",
@@ -56,7 +56,7 @@ Validation OK!
"name": "git",
"target": "dev",
"terraform": {
-"exec_path": "$TERRAFORM"
+"exec_path": "[TERRAFORM]"
}
},
"sync": {
@@ -72,19 +72,19 @@ Validation OK!
}
},
"workspace": {
-"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/artifacts",
-"file_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/files",
-"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/resources",
-"root_path": "/Workspace/Users/$USERNAME/.bundle/git/dev",
-"state_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/state"
+"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/artifacts",
+"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/files",
+"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/resources",
+"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev",
+"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/state"
}
}
->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Name: git
Target: dev
Workspace:
-User: $USERNAME
-Path: /Workspace/Users/$USERNAME/.bundle/git/dev
+User: [USERNAME]
+Path: /Workspace/Users/[USERNAME]/.bundle/git/dev
Validation OK!

View File

@@ -1,5 +1,5 @@
->>> errcode $CLI bundle validate -o json
+>>> errcode [CLI] bundle validate -o json
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
{
@@ -25,7 +25,7 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara
}
Exit code: 1
->>> errcode $CLI bundle validate
+>>> errcode [CLI] bundle validate
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
Name: host

View File

@@ -7,7 +7,7 @@
},
"target": "dev",
"terraform": {
-"exec_path": "$TERRAFORM"
+"exec_path": "[TERRAFORM]"
}
},
"resources": {
@@ -15,7 +15,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
-"metadata_file_path": "/Users/$USERNAME/path/to/root/state/metadata.json"
+"metadata_file_path": "/Users/[USERNAME]/path/to/root/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -29,7 +29,7 @@
"existing_cluster_id": "500",
"python_wheel_task": {
"named_parameters": {
-"conf-file": "/Users/$USERNAME/path/to/root/files/path/to/config.yaml"
+"conf-file": "/Users/[USERNAME]/path/to/root/files/path/to/config.yaml"
}
},
"task_key": ""
@@ -46,17 +46,17 @@
"targets": null,
"variables": {
"workspace_root": {
-"default": "/Users/$USERNAME",
+"default": "/Users/[USERNAME]",
"description": "root directory in the Databricks workspace to store the asset bundle and associated artifacts",
-"value": "/Users/$USERNAME"
+"value": "/Users/[USERNAME]"
}
},
"workspace": {
-"artifact_path": "/Users/$USERNAME/path/to/root/artifacts",
-"file_path": "/Users/$USERNAME/path/to/root/files",
+"artifact_path": "/Users/[USERNAME]/path/to/root/artifacts",
+"file_path": "/Users/[USERNAME]/path/to/root/files",
"profile": "profile_name",
-"resource_path": "/Users/$USERNAME/path/to/root/resources",
-"root_path": "/Users/$USERNAME/path/to/root",
-"state_path": "/Users/$USERNAME/path/to/root/state"
+"resource_path": "/Users/[USERNAME]/path/to/root/resources",
+"root_path": "/Users/[USERNAME]/path/to/root",
+"state_path": "/Users/[USERNAME]/path/to/root/state"
}
}
View File
@@ -20,7 +20,7 @@
       "job1": {
         "deployment": {
           "kind": "BUNDLE",
-          "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
         },
         "edit_mode": "UI_LOCKED",
         "format": "MULTI_TASK",
View File
@@ -1,15 +1,15 @@
->>> BUNDLE_VAR_b=def $CLI bundle validate -o json
+>>> BUNDLE_VAR_b=def [CLI] bundle validate -o json
 "abc def"
 
->>> errcode $CLI bundle validate
+>>> errcode [CLI] bundle validate
 Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
 
 Name: ${var.a} ${var.b}
 Target: default
 Workspace:
-  User: $USERNAME
-  Path: /Workspace/Users/$USERNAME/.bundle/${var.a} ${var.b}/default
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/${var.a} ${var.b}/default
 
 Found 1 error
View File
@@ -1,5 +1,5 @@
->>> $CLI bundle validate -o json -t use-default-variable-values
+>>> [CLI] bundle validate -o json -t use-default-variable-values
 {
   "pipelines": {
     "my_pipeline": {
@@ -12,7 +12,7 @@
       "continuous": true,
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/use-default-variable-values/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/use-default-variable-values/state/metadata.json"
       },
       "name": "a_string",
       "permissions": []
@@ -20,7 +20,7 @@
   }
 }
 
->>> $CLI bundle validate -o json -t override-string-variable
+>>> [CLI] bundle validate -o json -t override-string-variable
 {
   "pipelines": {
     "my_pipeline": {
@@ -33,7 +33,7 @@
       "continuous": true,
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-string-variable/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-string-variable/state/metadata.json"
       },
       "name": "overridden_string",
       "permissions": []
@@ -41,7 +41,7 @@
   }
 }
 
->>> $CLI bundle validate -o json -t override-int-variable
+>>> [CLI] bundle validate -o json -t override-int-variable
 {
   "pipelines": {
     "my_pipeline": {
@@ -54,7 +54,7 @@
       "continuous": true,
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-int-variable/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-int-variable/state/metadata.json"
       },
       "name": "a_string",
       "permissions": []
@@ -62,7 +62,7 @@
   }
 }
 
->>> $CLI bundle validate -o json -t override-both-bool-and-string-variables
+>>> [CLI] bundle validate -o json -t override-both-bool-and-string-variables
 {
   "pipelines": {
     "my_pipeline": {
@@ -75,7 +75,7 @@
       "continuous": false,
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
       },
       "name": "overridden_string",
       "permissions": []
View File
@@ -18,13 +18,13 @@ Exit code: 7
 === Capturing pwd
 >>> python3 -c import os; print(os.getcwd())
-$TMPDIR
+[TMPDIR]
 
 === Capturing subdir
 >>> mkdir -p subdir/a/b/c
 
 >>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd())
-$TMPDIR/subdir/a/b/c
+[TMPDIR]/subdir/a/b/c
 
 === Custom output files - everything starting with out is captured and compared
 >>> echo HELLO
@@ -35,5 +35,5 @@ CUSTOM_NUMBER_REGEX
 123456
 
 === Testing --version
->>> $CLI --version
-Databricks CLI v$DEV_VERSION
+>>> [CLI] --version
+Databricks CLI v[DEV_VERSION]
View File
@@ -16,5 +16,5 @@ New = "CUSTOM_NUMBER_REGEX"
 
 [[Repls]]
 # Fix path with reverse slashes in the output for Windows.
-Old = '\$TMPDIR\\subdir\\a\\b\\c'
-New = '$$TMPDIR/subdir/a/b/c'
+Old = '\[TMPDIR\]\\subdir\\a\\b\\c'
+New = '[TMPDIR]/subdir/a/b/c'
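A side note on the escaping in this [[Repls]] entry, since it is easy to misread: Old is compiled as a regular expression, where '[' and ']' are metacharacters and must be escaped, while New is a replacement template in which only '$' is special ('$$' emits a literal '$', which is what the old '$$TMPDIR' value relied on). With bracketed placeholders there is no '$' left to escape. A minimal standalone sketch of the same mechanics, assuming Go regexp semantics for Old/New as the '$$' escape suggests (illustrative only, not the testdiff implementation):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Pattern side: literal brackets and backslashes must be escaped,
	// hence Old = '\[TMPDIR\]\\subdir\\a\\b\\c'.
	re := regexp.MustCompile(`\[TMPDIR\]\\subdir\\a\\b\\c`)

	// Replacement side: '[' and ']' are literal; only '$' would need
	// doubling, and the bracketed placeholder contains none.
	out := re.ReplaceAllString(`[TMPDIR]\subdir\a\b\c`, `[TMPDIR]/subdir/a/b/c`)
	fmt.Println(out) // [TMPDIR]/subdir/a/b/c
}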
View File
@@ -1,5 +1,5 @@
->>> $TERRAFORM init -no-color -get=false
+>>> [TERRAFORM] init -no-color -get=false
 
 Initializing the backend...
 
@@ -35,12 +35,12 @@ If you ever set or change modules or backend configuration for Terraform,
 rerun this command to reinitialize your working directory. If you forget, other
 commands will detect it and remind you to do so if necessary.
 
->>> $TERRAFORM plan -no-color
+>>> [TERRAFORM] plan -no-color
 data.databricks_current_user.me: Reading...
-data.databricks_current_user.me: Read complete after (redacted) [id=$USER.Id]
+data.databricks_current_user.me: Read complete after (redacted) [id=[USERID]]
 
 Changes to Outputs:
-  + username = "$USERNAME"
+  + username = "[USERNAME]"
 
 You can apply this plan to save these new output values to the Terraform
 state, without changing any real infrastructure.
View File
@@ -1,5 +1,5 @@
->>> $CLI jobs create --json {"name":"abc"}
+>>> [CLI] jobs create --json {"name":"abc"}
 {
   "job_id":1111
 }
View File
@@ -1,4 +1,4 @@
-Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ/files...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ/files...
 Deploying resources...
 Updating deployment state...
 Deployment complete!
View File
@@ -1,7 +1,7 @@
 Name: basic
 Target: default
 Workspace:
-  User: $USERNAME
-  Path: /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ
 
 Validation OK!
View File
@@ -1,6 +1,6 @@
 Building project_name_$UNIQUE_PRJ...
 Uploading project_name_$UNIQUE_PRJ-0.0.1+[NUMID].[NUMID]-py3-none-any.whl...
-Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files...
 Deploying resources...
 Updating deployment state...
 Deployment complete!
View File
@@ -1,6 +1,6 @@
 Welcome to the default Python template for Databricks Asset Bundles!
 
-Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): $DATABRICKS_URL
+Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): [DATABRICKS_URL]
 
 ✨ Your new project has been created in the 'project_name_$UNIQUE_PRJ' directory!
View File
@@ -22,54 +22,54 @@
     "resources/project_name_$UNIQUE_PRJ.pipeline.yml"
   ],
   "workspace": {
-    "host": "$DATABRICKS_URL",
+    "host": "[DATABRICKS_URL]",
     "current_user": {
       "active": true,
-      "displayName": "$USERNAME",
+      "displayName": "[USERNAME]",
       "emails": [
         {
           "primary": true,
           "type": "work",
-          "value": "$USERNAME"
+          "value": "[USERNAME]"
         }
       ],
       "groups": [
         {
-          "$ref": "Groups/$USER.Groups[0]",
+          "$ref": "Groups/[USERGROUP]",
           "display": "team.engineering",
           "type": "direct",
-          "value": "$USER.Groups[0]"
+          "value": "[USERGROUP]"
         }
       ],
-      "id": "$USER.Id",
+      "id": "[USERID]",
       "name": {
-        "familyName": "$USERNAME",
-        "givenName": "$USERNAME"
+        "familyName": "[USERNAME]",
+        "givenName": "[USERNAME]"
       },
       "schemas": [
        "urn:ietf:params:scim:schemas:core:2.0:User",
        "urn:ietf:params:scim:schemas:extension:workspace:2.0:User"
      ],
-      "short_name": "$USERNAME",
-      "userName": "$USERNAME"
+      "short_name": "[USERNAME]",
+      "userName": "[USERNAME]"
     },
-    "root_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev",
-    "file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files",
-    "resource_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
-    "artifact_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
-    "state_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state"
+    "root_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev",
+    "file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files",
+    "resource_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
+    "artifact_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
+    "state_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state"
   },
   "resources": {
     "jobs": {
       "project_name_$UNIQUE_PRJ_job": {
         "deployment": {
           "kind": "BUNDLE",
-          "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
         },
         "edit_mode": "UI_LOCKED",
         "email_notifications": {
           "on_failure": [
-            "$USERNAME"
+            "[USERNAME]"
           ]
         },
         "format": "MULTI_TASK",
@@ -88,18 +88,18 @@
           }
         ],
         "max_concurrent_runs": 4,
-        "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_job",
+        "name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_job",
         "queue": {
           "enabled": true
         },
         "tags": {
-          "dev": "$USERNAME"
+          "dev": "[USERNAME]"
         },
         "tasks": [
           {
             "job_cluster_key": "job_cluster",
             "notebook_task": {
-              "notebook_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
+              "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
             },
             "task_key": "notebook_task"
           },
@@ -140,31 +140,31 @@
             "unit": "DAYS"
           }
         },
-        "url": "$DATABRICKS_URL/jobs/[NUMID]?o=[NUMID]"
+        "url": "[DATABRICKS_URL]/jobs/[NUMID]?o=[NUMID]"
       }
     },
     "pipelines": {
       "project_name_$UNIQUE_PRJ_pipeline": {
         "catalog": "main",
         "configuration": {
-          "bundle.sourcePath": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
+          "bundle.sourcePath": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
         },
         "deployment": {
           "kind": "BUNDLE",
-          "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
         },
         "development": true,
         "id": "[UUID]",
         "libraries": [
           {
             "notebook": {
-              "path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
+              "path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
             }
           }
         ],
-        "name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline",
+        "name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_pipeline",
         "target": "project_name_$UNIQUE_PRJ_dev",
-        "url": "$DATABRICKS_URL/pipelines/[UUID]?o=[NUMID]"
+        "url": "[DATABRICKS_URL]/pipelines/[UUID]?o=[NUMID]"
       }
     }
   },
@@ -174,12 +174,12 @@
     ]
   },
   "presets": {
-    "name_prefix": "[dev $USERNAME] ",
+    "name_prefix": "[dev [USERNAME]] ",
     "pipelines_development": true,
     "trigger_pause_status": "PAUSED",
     "jobs_max_concurrent_runs": 4,
     "tags": {
-      "dev": "$USERNAME"
+      "dev": "[USERNAME]"
     }
   }
 }
View File
@@ -1,8 +1,8 @@
 Name: project_name_$UNIQUE_PRJ
 Target: dev
 Workspace:
-  Host: $DATABRICKS_URL
-  User: $USERNAME
-  Path: /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev
+  Host: [DATABRICKS_URL]
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev
 
 Validation OK!
View File
@@ -2,7 +2,6 @@ package testdiff
 
 import (
     "encoding/json"
-    "fmt"
     "path/filepath"
     "regexp"
     "runtime"
@@ -16,7 +15,7 @@ import (
 )
 
 const (
-    testerName = "$USERNAME"
+    testerName = "[USERNAME]"
 )
 
 var (
@@ -140,25 +139,25 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsContext, w *databricks.WorkspaceClient) {
     t.Helper()
     // in some clouds (gcp) w.Config.Host includes "https://" prefix in others it's really just a host (azure)
     host := strings.TrimPrefix(strings.TrimPrefix(w.Config.Host, "http://"), "https://")
-    r.Set("https://"+host, "$DATABRICKS_URL")
-    r.Set("http://"+host, "$DATABRICKS_URL")
-    r.Set(host, "$DATABRICKS_HOST")
-    r.Set(w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID")
-    r.Set(w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID")
-    r.Set(w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID")
-    r.Set(w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID")
-    r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
-    r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
-    r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
-    r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
-    r.Set(w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID")
+    r.Set("https://"+host, "[DATABRICKS_URL]")
+    r.Set("http://"+host, "[DATABRICKS_URL]")
+    r.Set(host, "[DATABRICKS_HOST]")
+    r.Set(w.Config.ClusterID, "[DATABRICKS_CLUSTER_ID]")
+    r.Set(w.Config.WarehouseID, "[DATABRICKS_WAREHOUSE_ID]")
+    r.Set(w.Config.ServerlessComputeID, "[DATABRICKS_SERVERLESS_COMPUTE_ID]")
+    r.Set(w.Config.AccountID, "[DATABRICKS_ACCOUNT_ID]")
+    r.Set(w.Config.Username, "[DATABRICKS_USERNAME]")
+    r.SetPath(w.Config.Profile, "[DATABRICKS_CONFIG_PROFILE]")
+    r.Set(w.Config.ConfigFile, "[DATABRICKS_CONFIG_FILE]")
+    r.Set(w.Config.GoogleServiceAccount, "[DATABRICKS_GOOGLE_SERVICE_ACCOUNT]")
+    r.Set(w.Config.AzureResourceID, "[DATABRICKS_AZURE_RESOURCE_ID]")
     r.Set(w.Config.AzureClientID, testerName)
-    r.Set(w.Config.AzureTenantID, "$ARM_TENANT_ID")
-    r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
-    r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
-    r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
+    r.Set(w.Config.AzureTenantID, "[ARM_TENANT_ID]")
+    r.Set(w.Config.AzureEnvironment, "[ARM_ENVIRONMENT]")
+    r.Set(w.Config.ClientID, "[DATABRICKS_CLIENT_ID]")
+    r.SetPath(w.Config.DatabricksCliPath, "[DATABRICKS_CLI_PATH]")
     // This is set to words like "path" that happen too frequently
-    // r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
+    // r.Set(w.Config.AuthType, "[DATABRICKS_AUTH_TYPE]")
 }
 
 func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.User) {
@@ -179,14 +178,14 @@ func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.User) {
     r.Set(iamutil.GetShortUserName(&u), testerName)
 
-    for ind, val := range u.Groups {
-        r.Set(val.Value, fmt.Sprintf("$USER.Groups[%d]", ind))
+    for _, val := range u.Groups {
+        r.Set(val.Value, "[USERGROUP]")
     }
 
-    r.Set(u.Id, "$USER.Id")
+    r.Set(u.Id, "[USERID]")
 
-    for ind, val := range u.Roles {
-        r.Set(val.Value, fmt.Sprintf("$USER.Roles[%d]", ind))
+    for _, val := range u.Roles {
+        r.Set(val.Value, "[USERROLE]")
     }
 }
@@ -207,5 +206,5 @@ func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsContext) {
 
 func PrepareReplacementsDevVersion(t testutil.TestingT, r *ReplacementsContext) {
     t.Helper()
-    r.append(devVersionRegex, "$$DEV_VERSION")
+    r.append(devVersionRegex, "[DEV_VERSION]")
 }
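The Set/SetPath/append calls above register rewrites that normalize captured test output before it is compared against the golden files earlier in this commit. Their bodies are not part of this diff, so the following is only a minimal sketch of the idea, with assumed names (replacementsContext, set, apply are illustrative, not the actual testdiff types): each literal value is quoted with regexp.QuoteMeta and mapped to a bracketed placeholder such as [USERNAME].

package main

import (
	"fmt"
	"regexp"
)

// replacement pairs a compiled pattern with its bracketed placeholder.
type replacement struct {
	pattern *regexp.Regexp
	repl    string
}

// replacementsContext collects rewrites to apply to captured output.
type replacementsContext struct{ repls []replacement }

// set registers a literal value to be rewritten as a placeholder;
// QuoteMeta keeps dots, slashes, etc. from acting as metacharacters.
func (r *replacementsContext) set(value, placeholder string) {
	if value == "" {
		return // unset config fields must not become match-everything rules
	}
	r.repls = append(r.repls, replacement{
		pattern: regexp.MustCompile(regexp.QuoteMeta(value)),
		repl:    placeholder,
	})
}

// apply runs every rewrite in registration order.
func (r *replacementsContext) apply(out string) string {
	for _, rp := range r.repls {
		out = rp.pattern.ReplaceAllString(out, rp.repl)
	}
	return out
}

func main() {
	var r replacementsContext
	r.set("alice@example.com", "[USERNAME]") // hypothetical captured value
	fmt.Println(r.apply("User: alice@example.com"))
	// prints: User: [USERNAME]
}

Because the replacement strings are regexp templates, a literal '$' in a placeholder has to be written '$$' (as the old "$$DEV_VERSION" above shows); the bracketed form sidesteps that escape entirely.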