acc: Use [VARNAME] instead of $VARNAME (#2282)

$VARNAME is what we use for environment variables; it's good to
keep the two kinds of placeholder separate.

Some people use envsubst for homemade variable interpolation; it's also
good to have separation there.
This commit is contained in:
Denis Bilenko 2025-02-03 15:10:19 +01:00 committed by GitHub
parent 838de2fde2
commit 9320bd1682
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
79 changed files with 324 additions and 329 deletions

View File

@ -102,13 +102,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("CLI", execPath)
repls.SetPath(execPath, "$CLI")
repls.SetPath(execPath, "[CLI]")
// Make helper scripts available
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
tempHomeDir := t.TempDir()
repls.SetPath(tempHomeDir, "$TMPHOME")
repls.SetPath(tempHomeDir, "[TMPHOME]")
t.Logf("$TMPHOME=%v", tempHomeDir)
// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
@ -133,7 +133,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
repls.SetPath(terraformrcPath, "$DATABRICKS_TF_CLI_CONFIG_FILE")
repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
terraformExecPath := filepath.Join(buildDir, "terraform")
if runtime.GOOS == "windows" {
@ -141,10 +141,10 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
t.Setenv("TERRAFORM", terraformExecPath)
repls.SetPath(terraformExecPath, "$TERRAFORM")
repls.SetPath(terraformExecPath, "[TERRAFORM]")
// do it last so that full paths match first:
repls.SetPath(buildDir, "$BUILD_DIR")
repls.SetPath(buildDir, "[BUILD_DIR]")
workspaceClient, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
@ -226,7 +226,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
tmpDir = t.TempDir()
}
repls.SetPathWithParents(tmpDir, "$TMPDIR")
repls.SetPathWithParents(tmpDir, "[TMPDIR]")
repls.Repls = append(repls.Repls, config.Repls...)
scriptContents := readMergedScriptContents(t, dir)

View File

@ -2,20 +2,20 @@
>>> chmod 000 .git
>>> $CLI bundle validate
>>> [CLI] bundle validate
Error: unable to load repository specific gitconfig: open config: permission denied
Name: git-permerror
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default
Found 1 error
Exit code: 1
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@ -24,7 +24,7 @@ Exit code: 1
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@ -38,12 +38,12 @@ Exit code: 1
>>> chmod 000 .git/HEAD
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
@ -53,7 +53,7 @@ Exit code: 1
>>> chmod 000 .git/config
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@ -62,7 +62,7 @@ Exit code: 1
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied

View File

@ -1,5 +1,5 @@
>>> $CLI bundle deploy --help
>>> [CLI] bundle deploy --help
Deploy bundle
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle deployment --help
>>> [CLI] bundle deployment --help
Deployment related commands
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle destroy --help
>>> [CLI] bundle destroy --help
Destroy deployed bundle resources
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle generate dashboard --help
>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle generate job --help
>>> [CLI] bundle generate job --help
Generate bundle configuration for a job
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle generate pipeline --help
>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle generate --help
>>> [CLI] bundle generate --help
Generate bundle configuration
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle init --help
>>> [CLI] bundle init --help
Initialize using a bundle template.
TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle open --help
>>> [CLI] bundle open --help
Open a resource in the browser
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle run --help
>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.
The KEY is the unique identifier of the resource to run. In addition to

View File

@ -1,5 +1,5 @@
>>> $CLI bundle schema --help
>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle summary --help
>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle sync --help
>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate --help
>>> [CLI] bundle validate --help
Validate configuration
Usage:

View File

@ -1,5 +1,5 @@
>>> $CLI bundle --help
>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.
Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t default
>>> [CLI] bundle validate -o json -t default
{
"autoscale": {
"max_workers": 7,
@ -15,7 +15,7 @@
"spark_version": "13.3.x-scala2.12"
}
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"autoscale": {
"max_workers": 3,

View File

@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@ -27,12 +27,12 @@
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@ -27,21 +27,21 @@
}
}
>>> $CLI bundle validate -t development
>>> [CLI] bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development
Validation OK!
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@ -64,11 +64,11 @@ Validation OK!
}
}
>>> $CLI bundle validate -t staging
>>> [CLI] bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging
Validation OK!

View File

@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate -o json -t development
>>> errcode [CLI] bundle validate -o json -t development
Error: file ./test1.py not found

View File

@ -28,7 +28,7 @@
]
}
>>> errcode $CLI bundle validate -o json -t staging
>>> errcode [CLI] bundle validate -o json -t staging
Error: file ./test1.py not found
@ -63,14 +63,14 @@ Exit code: 1
]
}
>>> errcode $CLI bundle validate -t staging
>>> errcode [CLI] bundle validate -t staging
Error: file ./test1.py not found
Name: override_job_tasks
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_tasks/staging
Found 1 error

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@ -13,7 +13,7 @@ Warning: expected map, found string
}
}
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@ -21,7 +21,7 @@ Warning: expected map, found string
Name: merge-string-map
Target: dev
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/merge-string-map/dev
Found 1 warning

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"clusters": [
@ -14,14 +14,14 @@
],
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/development/state/metadata.json"
},
"name": "job",
"permissions": []
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"clusters": [
@ -36,7 +36,7 @@
],
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/staging/state/metadata.json"
},
"name": "job",
"permissions": []

View File

@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
"project_directory": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},

View File

@ -1,22 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook2"
}
}
]

View File

@ -1,15 +1,15 @@
>>> $CLI bundle validate -t development -o json
>>> [CLI] bundle validate -t development -o json
>>> $CLI bundle validate -t error
>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: fallback
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/fallback/error
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/fallback/error
Found 1 error

View File

@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
"project_directory": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
@ -68,7 +68,7 @@
"for_each_task": {
"task": {
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
}
}
},
@ -80,7 +80,7 @@
"task": {
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
}
}
},

View File

@ -1,22 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook2"
}
}
]

View File

@ -1,15 +1,15 @@
>>> $CLI bundle validate -t development -o json
>>> [CLI] bundle validate -t development -o json
>>> $CLI bundle validate -t error
>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: nominal
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/nominal/error
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/nominal/error
Found 1 error

View File

@ -1,4 +1,4 @@
>>> $CLI bundle validate -t default -o json
>>> [CLI] bundle validate -t default -o json
>>> $CLI bundle validate -t override -o json
>>> [CLI] bundle validate -t override -o json

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"mode": "development",
"quality_monitors": {
@ -21,7 +21,7 @@
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"mode": null,
"quality_monitors": {
@ -46,7 +46,7 @@
}
}
>>> $CLI bundle validate -o json -t production
>>> [CLI] bundle validate -o json -t production
{
"mode": null,
"quality_monitors": {

View File

@ -1,5 +1,5 @@
>>> EXITCODE=0 errcode $CLI bundle validate
>>> EXITCODE=0 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@ -9,12 +9,12 @@ from myscript.py 0 postinit: hello stderr!
Name: scripts
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/scripts/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/scripts/default
Validation OK!
>>> EXITCODE=1 errcode $CLI bundle validate
>>> EXITCODE=1 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 1 preinit: hello stdout!
from myscript.py 1 preinit: hello stderr!
@ -26,7 +26,7 @@ Found 1 error
Exit code: 1
>>> EXITCODE=0 errcode $CLI bundle deploy
>>> EXITCODE=0 errcode [CLI] bundle deploy
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@ -42,7 +42,7 @@ from myscript.py 0 postbuild: hello stderr!
Executing 'predeploy' script
from myscript.py 0 predeploy: hello stdout!
from myscript.py 0 predeploy: hello stderr!
Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME.
Error: unable to deploy to /Workspace/Users/[USERNAME]/.bundle/scripts/default/state as [USERNAME].
Please make sure the current user or one of their groups is listed under the permissions of this bundle.
For assistance, contact the owners of this project.
They may need to redeploy the bundle to apply the new permissions.

View File

@ -1,10 +1,10 @@
Error: path "$TMPDIR" is not within repository root "$TMPDIR/myrepo"
Error: path "[TMPDIR]" is not within repository root "[TMPDIR]/myrepo"
Name: test-bundle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Found 1 error

View File

@ -1,7 +1,7 @@
Name: test-bundle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Validation OK!

View File

@ -1,32 +1,32 @@
>>> $CLI bundle init dbt-sql --config-file ./input.json --output-dir output
>>> [CLI] bundle init dbt-sql --config-file ./input.json --output-dir output
Welcome to the dbt template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: $DATABRICKS_URL
workspace_host: [DATABRICKS_URL]
📊 Your new project has been created in the 'my_dbt_sql' directory!
If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
Refer to the README.md file for full "getting started" guide and production setup instructions.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_dbt_sql
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_dbt_sql
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/prod
Validation OK!

View File

@ -19,16 +19,16 @@ targets:
# See also https://docs.databricks.com/dev-tools/bundles/deployment-modes.html.
mode: development
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

View File

@ -5,7 +5,7 @@ fixed:
type: databricks
prompts:
host:
default: $DATABRICKS_HOST
default: [DATABRICKS_HOST]
token:
hint: 'personal access token to use, dapiXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
hide_input: true
@ -16,7 +16,7 @@ prompts:
hint: 'initial catalog'
default: main
schema:
hint: 'personal schema where dbt will build objects during development, example: $USERNAME'
hint: 'personal schema where dbt will build objects during development, example: [USERNAME]'
threads:
hint: 'threads to use during development, 1 or more'
type: 'int'

View File

@ -11,7 +11,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
tasks:

View File

@ -1,30 +1,30 @@
>>> $CLI bundle init default-python --config-file ./input.json --output-dir output
>>> [CLI] bundle init default-python --config-file ./input.json --output-dir output
Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): $DATABRICKS_URL
Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_python' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_default_python
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_default_python
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/prod
Validation OK!

View File

@ -16,16 +16,16 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

View File

@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
tasks:
- task_key: notebook_task

View File

@ -23,7 +23,7 @@ setup(
# to ensure that changes to wheel package are picked up when used on all-purpose clusters
version=my_default_python.__version__ + "+" + local_version,
url="https://databricks.com",
author="$USERNAME",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},

View File

@ -1,32 +1,32 @@
>>> $CLI bundle init default-sql --config-file ./input.json --output-dir output
>>> [CLI] bundle init default-sql --config-file ./input.json --output-dir output
Welcome to the default SQL template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: $DATABRICKS_URL
workspace_host: [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_sql' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_default_sql
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_default_sql
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/prod
Validation OK!

View File

@ -25,7 +25,7 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
variables:
warehouse_id: f00dcafe
catalog: main
@ -34,15 +34,15 @@ targets:
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
variables:
warehouse_id: f00dcafe
catalog: main
schema: default
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

View File

@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
parameters:
- name: catalog

View File

@ -1,28 +1,28 @@
>>> $CLI bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
>>> [CLI] bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
Welcome to (EXPERIMENTAL) "Jobs as code" template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): $DATABRICKS_URL
Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_jobs_as_code' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev --output json
Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION
>>> [CLI] bundle validate -t dev --output json
Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: [DEV_VERSION]
{
"jobs": {
"my_jobs_as_code_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"email_notifications": {
"on_failure": [
"$USERNAME"
"[USERNAME]"
]
},
"format": "MULTI_TASK",
@ -40,19 +40,19 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
}
],
"max_concurrent_runs": 4,
"name": "[dev $USERNAME] my_jobs_as_code_job",
"name": "[dev [USERNAME]] my_jobs_as_code_job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {
"dev": "$USERNAME"
"dev": "[USERNAME]"
},
"tasks": [
{
"job_cluster_key": "job_cluster",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/notebook"
},
"task_key": "notebook_task"
},

View File

@ -34,16 +34,16 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

View File

@ -17,7 +17,7 @@ my_jobs_as_code_job = Job.from_dict(
},
"email_notifications": {
"on_failure": [
"$USERNAME",
"[USERNAME]",
],
},
"tasks": [

View File

@ -1,4 +1,4 @@
Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'...
Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world [TMPDIR]_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '[TMPDIR]_GPARENT/world-123456'...
fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com

View File

@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate --var a=one -o json
>>> errcode [CLI] bundle validate --var a=one -o json
{
"a": {
"default": "hello",
@ -7,7 +7,7 @@
}
}
>>> errcode $CLI bundle validate --var a=one --var a=two
>>> errcode [CLI] bundle validate --var a=one --var a=two
Error: failed to assign two to a: variable has already been assigned value: one
Name: arg-repeat

View File

@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

View File

@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

View File

@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/default/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"

View File

@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
[
{
"task_key": "test default"

View File

@ -3,8 +3,8 @@ Error: no value assigned to required variable a. Assignment can be done using "-
Name: empty${var.a}
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/empty${var.a}/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/empty${var.a}/default
Found 1 error

View File

@ -1,27 +1,27 @@
>>> $CLI bundle validate -t env-with-single-variable-override -o json
>>> [CLI] bundle validate -t env-with-single-variable-override -o json
"default-a dev-b"
>>> $CLI bundle validate -t env-with-two-variable-overrides -o json
>>> [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a prod-b"
>>> BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json
>>> BUNDLE_VAR_b=env-var-b [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a env-var-b"
>>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
>>> errcode [CLI] bundle validate -t env-missing-a-required-variable-assignment
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: test bundle
Target: env-missing-a-required-variable-assignment
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test bundle/env-missing-a-required-variable-assignment
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test bundle/env-missing-a-required-variable-assignment
Found 1 error
Exit code: 1
>>> errcode $CLI bundle validate -t env-using-an-undefined-variable
>>> errcode [CLI] bundle validate -t env-using-an-undefined-variable
Error: variable c is not defined but is assigned a value
Name: test bundle
@ -30,7 +30,7 @@ Found 1 error
Exit code: 1
>>> $CLI bundle validate -t env-overrides-lookup -o json
>>> [CLI] bundle validate -t env-overrides-lookup -o json
{
"a": "default-a",
"b": "prod-b",

View File

@ -1,6 +1,6 @@
=== variable file
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster",
"new_cluster": {
@ -10,7 +10,7 @@
}
=== variable file and variable flag
>>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
>>> [CLI] bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@ -20,7 +20,7 @@
}
=== variable file and environment variable
>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@ -30,7 +30,7 @@
}
=== variable has value in config file
>>> $CLI bundle validate -o json --target with_value
>>> [CLI] bundle validate -o json --target with_value
{
"job_cluster_key": "mlops_stacks-cluster-from-file",
"new_cluster": {
@ -40,8 +40,8 @@
}
=== file cannot be parsed
>>> errcode $CLI bundle validate -o json --target invalid_json
Error: failed to parse variables file $TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
>>> errcode [CLI] bundle validate -o json --target invalid_json
Error: failed to parse variables file [TMPDIR]/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
Exit code: 1
@ -54,8 +54,8 @@ Exit code: 1
}
=== file has wrong structure
>>> errcode $CLI bundle validate -o json --target wrong_file_structure
Error: failed to parse variables file $TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
>>> errcode [CLI] bundle validate -o json --target wrong_file_structure
Error: failed to parse variables file [TMPDIR]/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
Variables file must be a JSON object with the following format:
{"var1": "value1", "var2": "value2"}
@ -71,7 +71,7 @@ Exit code: 1
}
=== file has variable that is complex but default is string
>>> errcode $CLI bundle validate -o json --target complex_to_string
>>> errcode [CLI] bundle validate -o json --target complex_to_string
Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type
@ -85,7 +85,7 @@ Exit code: 1
}
=== file has variable that is string but default is complex
>>> errcode $CLI bundle validate -o json --target string_to_complex
>>> errcode [CLI] bundle validate -o json --target string_to_complex
Error: variable cluster is of type complex, but the value in the variable file is not a complex type
@ -99,7 +99,7 @@ Exit code: 1
}
=== variable is required but it's not provided in the file
>>> errcode $CLI bundle validate -o json --target without_defaults
>>> errcode [CLI] bundle validate -o json --target without_defaults
Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

View File

@ -1,8 +1,4 @@
# Fix for windows
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\wrong_file_structure\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json'
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\invalid_json\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json'
Old = '\\'
New = '/'

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"bundle": {
"environment": "prod",
@ -11,7 +11,7 @@
"name": "git",
"target": "prod",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"sync": {
@ -27,24 +27,24 @@
}
},
"workspace": {
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/artifacts",
"file_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/files",
"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/resources",
"root_path": "/Workspace/Users/$USERNAME/.bundle/git/prod",
"state_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/state"
"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/artifacts",
"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/files",
"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/resources",
"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod",
"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/state"
}
}
>>> $CLI bundle validate
>>> [CLI] bundle validate
Name: git
Target: prod
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git/prod
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git/prod
Validation OK!
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
{
"bundle": {
"environment": "dev",
@ -56,7 +56,7 @@ Validation OK!
"name": "git",
"target": "dev",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"sync": {
@ -72,19 +72,19 @@ Validation OK!
}
},
"workspace": {
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/artifacts",
"file_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/files",
"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/resources",
"root_path": "/Workspace/Users/$USERNAME/.bundle/git/dev",
"state_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/state"
"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/artifacts",
"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/files",
"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/resources",
"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev",
"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/state"
}
}
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: git
Target: dev
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git/dev
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git/dev
Validation OK!

View File

@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate -o json
>>> errcode [CLI] bundle validate -o json
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
{
@ -25,7 +25,7 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara
}
Exit code: 1
>>> errcode $CLI bundle validate
>>> errcode [CLI] bundle validate
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
Name: host

View File

@ -7,7 +7,7 @@
},
"target": "dev",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"resources": {
@ -15,7 +15,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Users/$USERNAME/path/to/root/state/metadata.json"
"metadata_file_path": "/Users/[USERNAME]/path/to/root/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@ -29,7 +29,7 @@
"existing_cluster_id": "500",
"python_wheel_task": {
"named_parameters": {
"conf-file": "/Users/$USERNAME/path/to/root/files/path/to/config.yaml"
"conf-file": "/Users/[USERNAME]/path/to/root/files/path/to/config.yaml"
}
},
"task_key": ""
@ -46,17 +46,17 @@
"targets": null,
"variables": {
"workspace_root": {
"default": "/Users/$USERNAME",
"default": "/Users/[USERNAME]",
"description": "root directory in the Databricks workspace to store the asset bundle and associated artifacts",
"value": "/Users/$USERNAME"
"value": "/Users/[USERNAME]"
}
},
"workspace": {
"artifact_path": "/Users/$USERNAME/path/to/root/artifacts",
"file_path": "/Users/$USERNAME/path/to/root/files",
"artifact_path": "/Users/[USERNAME]/path/to/root/artifacts",
"file_path": "/Users/[USERNAME]/path/to/root/files",
"profile": "profile_name",
"resource_path": "/Users/$USERNAME/path/to/root/resources",
"root_path": "/Users/$USERNAME/path/to/root",
"state_path": "/Users/$USERNAME/path/to/root/state"
"resource_path": "/Users/[USERNAME]/path/to/root/resources",
"root_path": "/Users/[USERNAME]/path/to/root",
"state_path": "/Users/[USERNAME]/path/to/root/state"
}
}

View File

@ -20,7 +20,7 @@
"job1": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

View File

@ -1,15 +1,15 @@
>>> BUNDLE_VAR_b=def $CLI bundle validate -o json
>>> BUNDLE_VAR_b=def [CLI] bundle validate -o json
"abc def"
>>> errcode $CLI bundle validate
>>> errcode [CLI] bundle validate
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: ${var.a} ${var.b}
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/${var.a} ${var.b}/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/${var.a} ${var.b}/default
Found 1 error

View File

@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t use-default-variable-values
>>> [CLI] bundle validate -o json -t use-default-variable-values
{
"pipelines": {
"my_pipeline": {
@ -12,7 +12,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/use-default-variable-values/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/use-default-variable-values/state/metadata.json"
},
"name": "a_string",
"permissions": []
@ -20,7 +20,7 @@
}
}
>>> $CLI bundle validate -o json -t override-string-variable
>>> [CLI] bundle validate -o json -t override-string-variable
{
"pipelines": {
"my_pipeline": {
@ -33,7 +33,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-string-variable/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-string-variable/state/metadata.json"
},
"name": "overridden_string",
"permissions": []
@ -41,7 +41,7 @@
}
}
>>> $CLI bundle validate -o json -t override-int-variable
>>> [CLI] bundle validate -o json -t override-int-variable
{
"pipelines": {
"my_pipeline": {
@ -54,7 +54,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-int-variable/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-int-variable/state/metadata.json"
},
"name": "a_string",
"permissions": []
@ -62,7 +62,7 @@
}
}
>>> $CLI bundle validate -o json -t override-both-bool-and-string-variables
>>> [CLI] bundle validate -o json -t override-both-bool-and-string-variables
{
"pipelines": {
"my_pipeline": {
@ -75,7 +75,7 @@
"continuous": false,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
},
"name": "overridden_string",
"permissions": []

View File

@ -18,13 +18,13 @@ Exit code: 7
=== Capturing pwd
>>> python3 -c import os; print(os.getcwd())
$TMPDIR
[TMPDIR]
=== Capturing subdir
>>> mkdir -p subdir/a/b/c
>>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd())
$TMPDIR/subdir/a/b/c
[TMPDIR]/subdir/a/b/c
=== Custom output files - everything starting with out is captured and compared
>>> echo HELLO
@ -35,5 +35,5 @@ CUSTOM_NUMBER_REGEX
123456
=== Testing --version
>>> $CLI --version
Databricks CLI v$DEV_VERSION
>>> [CLI] --version
Databricks CLI v[DEV_VERSION]

View File

@ -16,5 +16,5 @@ New = "CUSTOM_NUMBER_REGEX"
[[Repls]]
# Fix path with reverse slashes in the output for Windows.
Old = '\$TMPDIR\\subdir\\a\\b\\c'
New = '$$TMPDIR/subdir/a/b/c'
Old = 'TMPDIR]\\subdir\\a\\b\\c'
New = 'TMPDIR]/subdir/a/b/c'

View File

@ -1,5 +1,5 @@
>>> $TERRAFORM init -no-color -get=false
>>> [TERRAFORM] init -no-color -get=false
Initializing the backend...
@ -35,12 +35,12 @@ If you ever set or change modules or backend configuration for Terraform,
rerun this command to reinitialize your working directory. If you forget, other
commands will detect it and remind you to do so if necessary.
>>> $TERRAFORM plan -no-color
>>> [TERRAFORM] plan -no-color
data.databricks_current_user.me: Reading...
data.databricks_current_user.me: Read complete after (redacted) [id=$USER.Id]
data.databricks_current_user.me: Read complete after (redacted) [id=[USERID]]
Changes to Outputs:
+ username = "$USERNAME"
+ username = "[USERNAME]"
You can apply this plan to save these new output values to the Terraform
state, without changing any real infrastructure.

View File

@ -1,5 +1,5 @@
>>> $CLI jobs create --json {"name":"abc"}
>>> [CLI] jobs create --json {"name":"abc"}
{
"job_id":1111
}

View File

@ -1,4 +1,4 @@
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ/files...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -1,7 +1,7 @@
Name: basic
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/$UNIQUE_PRJ
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ
Validation OK!

View File

@ -1,6 +1,6 @@
Building project_name_$UNIQUE_PRJ...
Uploading project_name_$UNIQUE_PRJ-0.0.1+[NUMID].[NUMID]-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -1,6 +1,6 @@
Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): $DATABRICKS_URL
Workspace to use (auto-detected, edit in 'project_name_$UNIQUE_PRJ/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'project_name_$UNIQUE_PRJ' directory!

View File

@ -22,54 +22,54 @@
"resources/project_name_$UNIQUE_PRJ.pipeline.yml"
],
"workspace": {
"host": "$DATABRICKS_URL",
"host": "[DATABRICKS_URL]",
"current_user": {
"active": true,
"displayName": "$USERNAME",
"displayName": "[USERNAME]",
"emails": [
{
"primary": true,
"type": "work",
"value": "$USERNAME"
"value": "[USERNAME]"
}
],
"groups": [
{
"$ref": "Groups/$USER.Groups[0]",
"$ref": "Groups/[USERGROUP]",
"display": "team.engineering",
"type": "direct",
"value": "$USER.Groups[0]"
"value": "[USERGROUP]"
}
],
"id": "$USER.Id",
"id": "[USERID]",
"name": {
"familyName": "$USERNAME",
"givenName": "$USERNAME"
"familyName": "[USERNAME]",
"givenName": "[USERNAME]"
},
"schemas": [
"urn:ietf:params:scim:schemas:core:2.0:User",
"urn:ietf:params:scim:schemas:extension:workspace:2.0:User"
],
"short_name": "$USERNAME",
"userName": "$USERNAME"
"short_name": "[USERNAME]",
"userName": "[USERNAME]"
},
"root_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev",
"file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files",
"resource_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
"state_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state"
"root_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev",
"file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files",
"resource_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/resources",
"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/artifacts",
"state_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state"
},
"resources": {
"jobs": {
"project_name_$UNIQUE_PRJ_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"email_notifications": {
"on_failure": [
"$USERNAME"
"[USERNAME]"
]
},
"format": "MULTI_TASK",
@ -88,18 +88,18 @@
}
],
"max_concurrent_runs": 4,
"name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_job",
"name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_job",
"queue": {
"enabled": true
},
"tags": {
"dev": "$USERNAME"
"dev": "[USERNAME]"
},
"tasks": [
{
"job_cluster_key": "job_cluster",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/notebook"
},
"task_key": "notebook_task"
},
@ -140,31 +140,31 @@
"unit": "DAYS"
}
},
"url": "$DATABRICKS_URL/jobs/[NUMID]?o=[NUMID]"
"url": "[DATABRICKS_URL]/jobs/[NUMID]?o=[NUMID]"
}
},
"pipelines": {
"project_name_$UNIQUE_PRJ_pipeline": {
"catalog": "main",
"configuration": {
"bundle.sourcePath": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
"bundle.sourcePath": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src"
},
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/state/metadata.json"
},
"development": true,
"id": "[UUID]",
"libraries": [
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
"path": "/Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev/files/src/dlt_pipeline"
}
}
],
"name": "[dev $USERNAME] project_name_$UNIQUE_PRJ_pipeline",
"name": "[dev [USERNAME]] project_name_$UNIQUE_PRJ_pipeline",
"target": "project_name_$UNIQUE_PRJ_dev",
"url": "$DATABRICKS_URL/pipelines/[UUID]?o=[NUMID]"
"url": "[DATABRICKS_URL]/pipelines/[UUID]?o=[NUMID]"
}
}
},
@ -174,12 +174,12 @@
]
},
"presets": {
"name_prefix": "[dev $USERNAME] ",
"name_prefix": "[dev [USERNAME]] ",
"pipelines_development": true,
"trigger_pause_status": "PAUSED",
"jobs_max_concurrent_runs": 4,
"tags": {
"dev": "$USERNAME"
"dev": "[USERNAME]"
}
}
}
}

View File

@ -1,8 +1,8 @@
Name: project_name_$UNIQUE_PRJ
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/project_name_$UNIQUE_PRJ/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/project_name_$UNIQUE_PRJ/dev
Validation OK!

View File

@ -2,7 +2,6 @@ package testdiff
import (
"encoding/json"
"fmt"
"path/filepath"
"regexp"
"runtime"
@ -16,7 +15,7 @@ import (
)
const (
testerName = "$USERNAME"
testerName = "[USERNAME]"
)
var (
@ -140,25 +139,25 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
t.Helper()
// in some clouds (gcp) w.Config.Host includes "https://" prefix in others it's really just a host (azure)
host := strings.TrimPrefix(strings.TrimPrefix(w.Config.Host, "http://"), "https://")
r.Set("https://"+host, "$DATABRICKS_URL")
r.Set("http://"+host, "$DATABRICKS_URL")
r.Set(host, "$DATABRICKS_HOST")
r.Set(w.Config.ClusterID, "$DATABRICKS_CLUSTER_ID")
r.Set(w.Config.WarehouseID, "$DATABRICKS_WAREHOUSE_ID")
r.Set(w.Config.ServerlessComputeID, "$DATABRICKS_SERVERLESS_COMPUTE_ID")
r.Set(w.Config.AccountID, "$DATABRICKS_ACCOUNT_ID")
r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
r.Set(w.Config.AzureResourceID, "$DATABRICKS_AZURE_RESOURCE_ID")
r.Set("https://"+host, "[DATABRICKS_URL]")
r.Set("http://"+host, "[DATABRICKS_URL]")
r.Set(host, "[DATABRICKS_HOST]")
r.Set(w.Config.ClusterID, "[DATABRICKS_CLUSTER_ID]")
r.Set(w.Config.WarehouseID, "[DATABRICKS_WAREHOUSE_ID]")
r.Set(w.Config.ServerlessComputeID, "[DATABRICKS_SERVERLESS_COMPUTE_ID]")
r.Set(w.Config.AccountID, "[DATABRICKS_ACCOUNT_ID]")
r.Set(w.Config.Username, "[DATABRICKS_USERNAME]")
r.SetPath(w.Config.Profile, "[DATABRICKS_CONFIG_PROFILE]")
r.Set(w.Config.ConfigFile, "[DATABRICKS_CONFIG_FILE]")
r.Set(w.Config.GoogleServiceAccount, "[DATABRICKS_GOOGLE_SERVICE_ACCOUNT]")
r.Set(w.Config.AzureResourceID, "[DATABRICKS_AZURE_RESOURCE_ID]")
r.Set(w.Config.AzureClientID, testerName)
r.Set(w.Config.AzureTenantID, "$ARM_TENANT_ID")
r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
r.Set(w.Config.AzureTenantID, "[ARM_TENANT_ID]")
r.Set(w.Config.AzureEnvironment, "[ARM_ENVIRONMENT]")
r.Set(w.Config.ClientID, "[DATABRICKS_CLIENT_ID]")
r.SetPath(w.Config.DatabricksCliPath, "[DATABRICKS_CLI_PATH]")
// This is set to words like "path" that happen too frequently
// r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
// r.Set(w.Config.AuthType, "[DATABRICKS_AUTH_TYPE]")
}
func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.User) {
@ -179,14 +178,14 @@ func PrepareReplacementsUser(t testutil.TestingT, r *ReplacementsContext, u iam.
r.Set(iamutil.GetShortUserName(&u), testerName)
for ind, val := range u.Groups {
r.Set(val.Value, fmt.Sprintf("$USER.Groups[%d]", ind))
for _, val := range u.Groups {
r.Set(val.Value, "[USERGROUP]")
}
r.Set(u.Id, "$USER.Id")
r.Set(u.Id, "[USERID]")
for ind, val := range u.Roles {
r.Set(val.Value, fmt.Sprintf("$USER.Roles[%d]", ind))
for _, val := range u.Roles {
r.Set(val.Value, "[USERROLE]")
}
}
@ -207,5 +206,5 @@ func PrepareReplacementsTemporaryDirectory(t testutil.TestingT, r *ReplacementsC
func PrepareReplacementsDevVersion(t testutil.TestingT, r *ReplacementsContext) {
t.Helper()
r.append(devVersionRegex, "$$DEV_VERSION")
r.append(devVersionRegex, "[DEV_VERSION]")
}