Merge remote-tracking branch 'origin' into async-logger-clean

Shreyas Goenka 2025-02-05 15:20:34 +01:00
commit 1bb45377e0
GPG Key ID: 92A07DF49CCB0622
115 changed files with 574 additions and 400 deletions

.github/CODEOWNERS

@@ -1 +1,2 @@
* @pietern @andrewnester @shreyas-goenka @denik
cmd/labs @alexott @nfx

@@ -18,7 +18,7 @@ jobs:
pull-requests: write
steps:
- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
with:
stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.

@@ -20,7 +20,7 @@ jobs:
steps:
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
with:
app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -23,7 +23,7 @@ jobs:
steps:
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
with:
app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -50,7 +50,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
- name: Setup Python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
@@ -82,7 +82,7 @@ jobs:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# Use different schema from regular job, to avoid overwriting the same key
cache-dependency-path: |
go.sum
@@ -95,12 +95,12 @@ jobs:
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
with:
version: v1.63.4
args: --timeout=15m
- name: Run ruff
uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
with:
version: "0.9.1"
args: "format --check"
@@ -116,7 +116,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# Use different schema from regular job, to avoid overwriting the same key
cache-dependency-path: |
go.sum

@@ -34,7 +34,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -26,7 +26,7 @@ jobs:
- name: Setup Go
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -102,13 +102,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("CLI", execPath)
repls.SetPath(execPath, "$CLI")
repls.SetPath(execPath, "[CLI]")
// Make helper scripts available
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
tempHomeDir := t.TempDir()
repls.SetPath(tempHomeDir, "$TMPHOME")
repls.SetPath(tempHomeDir, "[TMPHOME]")
t.Logf("$TMPHOME=%v", tempHomeDir)
// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
@@ -133,7 +133,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
repls.SetPath(terraformrcPath, "$DATABRICKS_TF_CLI_CONFIG_FILE")
repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
terraformExecPath := filepath.Join(buildDir, "terraform")
if runtime.GOOS == "windows" {
@@ -141,10 +141,10 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
t.Setenv("TERRAFORM", terraformExecPath)
repls.SetPath(terraformExecPath, "$TERRAFORM")
repls.SetPath(terraformExecPath, "[TERRAFORM]")
// do it last so that full paths match first:
repls.SetPath(buildDir, "$BUILD_DIR")
repls.SetPath(buildDir, "[BUILD_DIR]")
workspaceClient, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
@@ -156,6 +156,8 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
testdiff.PrepareReplacementsUUID(t, &repls)
testdiff.PrepareReplacementsDevVersion(t, &repls)
testdiff.PrepareReplacementSdkVersion(t, &repls)
testdiff.PrepareReplacementsGoVersion(t, &repls)
testDirs := getTests(t)
require.NotEmpty(t, testDirs)
@@ -226,7 +228,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
tmpDir = t.TempDir()
}
repls.SetPathWithParents(tmpDir, "$TMPDIR")
repls.SetPathWithParents(tmpDir, "[TMPDIR]")
repls.Repls = append(repls.Repls, config.Repls...)
scriptContents := readMergedScriptContents(t, dir)
@@ -253,6 +255,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if len(config.Server) > 0 || config.RecordRequests {
server = testserver.New(t)
server.RecordRequests = config.RecordRequests
server.IncludeRequestHeaders = config.IncludeRequestHeaders
// If no custom server stubs are defined, add the default handlers.
if len(config.Server) == 0 {
@@ -261,8 +264,12 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
for _, stub := range config.Server {
require.NotEmpty(t, stub.Pattern)
server.Handle(stub.Pattern, func(req *http.Request) (resp any, err error) {
return stub.Response.Body, nil
server.Handle(stub.Pattern, func(req *http.Request) (any, int) {
statusCode := http.StatusOK
if stub.Response.StatusCode != 0 {
statusCode = stub.Response.StatusCode
}
return stub.Response.Body, statusCode
})
}
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+server.URL)

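The hunk above changes the stub handler contract: handlers now return a response body plus an HTTP status code instead of (body, error), and a stub whose StatusCode is left unset falls back to 200 OK. A minimal sketch of that shape, using illustrative names rather than the repo's actual testserver types:

package stubsketch

import "net/http"

// handlerFunc mirrors the new signature from the diff: body plus status code.
type handlerFunc func(req *http.Request) (any, int)

// stubHandler reproduces the defaulting logic shown in acceptance_test.go:
// a stub whose StatusCode is zero is served as 200 OK.
func stubHandler(body string, statusCode int) handlerFunc {
	return func(req *http.Request) (any, int) {
		code := http.StatusOK
		if statusCode != 0 {
			code = statusCode
		}
		return body, code
	}
}
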
@@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST
[profile_name]
host = https://test@non-existing-subdomain.databricks.com

@@ -0,0 +1,14 @@
bundle:
name: test-auth
workspace:
host: $DATABRICKS_HOST
targets:
dev:
default: true
workspace:
host: $DATABRICKS_HOST
prod:
workspace:
host: https://bar.com

@@ -0,0 +1,32 @@
=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"
=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"
=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]
Exit code: 1
=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]
Exit code: 1
=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"

@@ -0,0 +1,30 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg
host=$DATABRICKS_HOST
unset DATABRICKS_HOST
title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName
title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
trace errcode $CLI current-user me -p profile_name | jq .userName
title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name
cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName

@@ -0,0 +1,8 @@
Badness = "When -p flag is used inside the bundle folder for any CLI commands, CLI use bundle host anyway instead of profile one"
# Some of the clouds have DATABRICKS_HOST variable setup without https:// prefix
# In the result, output is replaced with DATABRICKS_URL variable instead of DATABRICKS_HOST
# This is a workaround to replace DATABRICKS_URL with DATABRICKS_HOST
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_URL'

@@ -2,20 +2,20 @@
>>> chmod 000 .git
>>> $CLI bundle validate
>>> [CLI] bundle validate
Error: unable to load repository specific gitconfig: open config: permission denied
Name: git-permerror
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default
Found 1 error
Exit code: 1
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -24,7 +24,7 @@ Exit code: 1
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -38,12 +38,12 @@ Exit code: 1
>>> chmod 000 .git/HEAD
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
{
"bundle_root_path": "."
}
@@ -53,7 +53,7 @@ Exit code: 1
>>> chmod 000 .git/config
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied
@@ -62,7 +62,7 @@ Exit code: 1
"bundle_root_path": "."
}
>>> withdir subdir/a/b $CLI bundle validate -o json
>>> withdir subdir/a/b [CLI] bundle validate -o json
Error: unable to load repository specific gitconfig: open config: permission denied

@@ -1,5 +1,5 @@
>>> $CLI bundle deploy --help
>>> [CLI] bundle deploy --help
Deploy bundle
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle deployment --help
>>> [CLI] bundle deployment --help
Deployment related commands
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle destroy --help
>>> [CLI] bundle destroy --help
Destroy deployed bundle resources
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle generate dashboard --help
>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle generate job --help
>>> [CLI] bundle generate job --help
Generate bundle configuration for a job
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle generate pipeline --help
>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle generate --help
>>> [CLI] bundle generate --help
Generate bundle configuration
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle init --help
>>> [CLI] bundle init --help
Initialize using a bundle template.
TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:

@@ -1,5 +1,5 @@
>>> $CLI bundle open --help
>>> [CLI] bundle open --help
Open a resource in the browser
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle run --help
>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.
The KEY is the unique identifier of the resource to run. In addition to

@@ -1,5 +1,5 @@
>>> $CLI bundle schema --help
>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle summary --help
>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle sync --help
>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle validate --help
>>> [CLI] bundle validate --help
Validate configuration
Usage:

@@ -1,5 +1,5 @@
>>> $CLI bundle --help
>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.
Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html

@@ -1,7 +1,7 @@
Error: Files in the 'include' configuration section must be YAML files.
Error: Files in the 'include' configuration section must be YAML or JSON files.
in databricks.yml:5:4
The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
Name: non_yaml_in_includes

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t default
>>> [CLI] bundle validate -o json -t default
{
"autoscale": {
"max_workers": 7,
@@ -15,7 +15,7 @@
"spark_version": "13.3.x-scala2.12"
}
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"autoscale": {
"max_workers": 3,

@@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -27,12 +27,12 @@
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

@@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -27,21 +27,21 @@
}
}
>>> $CLI bundle validate -t development
>>> [CLI] bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development
Validation OK!
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -64,11 +64,11 @@ Validation OK!
}
}
>>> $CLI bundle validate -t staging
>>> [CLI] bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging
Validation OK!

@@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate -o json -t development
>>> errcode [CLI] bundle validate -o json -t development
Error: file ./test1.py not found

@@ -28,7 +28,7 @@
]
}
>>> errcode $CLI bundle validate -o json -t staging
>>> errcode [CLI] bundle validate -o json -t staging
Error: file ./test1.py not found
@@ -63,14 +63,14 @@ Exit code: 1
]
}
>>> errcode $CLI bundle validate -t staging
>>> errcode [CLI] bundle validate -t staging
Error: file ./test1.py not found
Name: override_job_tasks
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_tasks/staging
Found 1 error

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@@ -13,7 +13,7 @@ Warning: expected map, found string
}
}
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17
@@ -21,7 +21,7 @@ Warning: expected map, found string
Name: merge-string-map
Target: dev
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/merge-string-map/dev
Found 1 warning

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"clusters": [
@@ -14,14 +14,14 @@
],
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/development/state/metadata.json"
},
"name": "job",
"permissions": []
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"clusters": [
@@ -36,7 +36,7 @@
],
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/staging/state/metadata.json"
},
"name": "job",
"permissions": []

@@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
"project_directory": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},

@@ -1,22 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook2"
}
}
]

@@ -1,15 +1,15 @@
>>> $CLI bundle validate -t development -o json
>>> [CLI] bundle validate -t development -o json
>>> $CLI bundle validate -t error
>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: fallback
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/fallback/error
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/fallback/error
Found 1 error

@@ -2,14 +2,14 @@
{
"job_cluster_key": "default",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
},
"task_key": "notebook_example"
},
{
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
},
"task_key": "spark_python_example"
},
@@ -19,7 +19,7 @@
"dbt run",
"dbt run"
],
"project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
"project_directory": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/dbt_project"
},
"job_cluster_key": "default",
"task_key": "dbt_example"
@@ -28,7 +28,7 @@
"job_cluster_key": "default",
"sql_task": {
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
},
"warehouse_id": "cafef00d"
},
@@ -68,7 +68,7 @@
"for_each_task": {
"task": {
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
}
}
},
@@ -80,7 +80,7 @@
"task": {
"job_cluster_key": "default",
"spark_python_task": {
"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
}
}
},

@@ -1,22 +1,22 @@
[
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file1.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook1"
}
},
{
"file": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file2.py"
}
},
{
"notebook": {
"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook2"
}
}
]

@@ -1,15 +1,15 @@
>>> $CLI bundle validate -t development -o json
>>> [CLI] bundle validate -t development -o json
>>> $CLI bundle validate -t error
>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
Name: nominal
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/nominal/error
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/nominal/error
Found 1 error

@@ -1,4 +1,4 @@
>>> $CLI bundle validate -t default -o json
>>> [CLI] bundle validate -t default -o json
>>> $CLI bundle validate -t override -o json
>>> [CLI] bundle validate -t override -o json

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t development
>>> [CLI] bundle validate -o json -t development
{
"mode": "development",
"quality_monitors": {
@@ -21,7 +21,7 @@
}
}
>>> $CLI bundle validate -o json -t staging
>>> [CLI] bundle validate -o json -t staging
{
"mode": null,
"quality_monitors": {
@@ -46,7 +46,7 @@
}
}
>>> $CLI bundle validate -o json -t production
>>> [CLI] bundle validate -o json -t production
{
"mode": null,
"quality_monitors": {

@@ -1,5 +1,5 @@
>>> EXITCODE=0 errcode $CLI bundle validate
>>> EXITCODE=0 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@@ -9,12 +9,12 @@ from myscript.py 0 postinit: hello stderr!
Name: scripts
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/scripts/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/scripts/default
Validation OK!
>>> EXITCODE=1 errcode $CLI bundle validate
>>> EXITCODE=1 errcode [CLI] bundle validate
Executing 'preinit' script
from myscript.py 1 preinit: hello stdout!
from myscript.py 1 preinit: hello stderr!
@@ -26,7 +26,7 @@ Found 1 error
Exit code: 1
>>> EXITCODE=0 errcode $CLI bundle deploy
>>> EXITCODE=0 errcode [CLI] bundle deploy
Executing 'preinit' script
from myscript.py 0 preinit: hello stdout!
from myscript.py 0 preinit: hello stderr!
@@ -42,7 +42,7 @@ from myscript.py 0 postbuild: hello stderr!
Executing 'predeploy' script
from myscript.py 0 predeploy: hello stdout!
from myscript.py 0 predeploy: hello stderr!
Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME.
Error: unable to deploy to /Workspace/Users/[USERNAME]/.bundle/scripts/default/state as [USERNAME].
Please make sure the current user or one of their groups is listed under the permissions of this bundle.
For assistance, contact the owners of this project.
They may need to redeploy the bundle to apply the new permissions.

@@ -1,10 +1,10 @@
Error: path "$TMPDIR" is not within repository root "$TMPDIR/myrepo"
Error: path "[TMPDIR]" is not within repository root "[TMPDIR]/myrepo"
Name: test-bundle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Found 1 error

@@ -1,7 +1,7 @@
Name: test-bundle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
Validation OK!

@@ -1,32 +1,32 @@
>>> $CLI bundle init dbt-sql --config-file ./input.json --output-dir output
>>> [CLI] bundle init dbt-sql --config-file ./input.json --output-dir output
Welcome to the dbt template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: $DATABRICKS_URL
workspace_host: [DATABRICKS_URL]
📊 Your new project has been created in the 'my_dbt_sql' directory!
If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
Refer to the README.md file for full "getting started" guide and production setup instructions.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_dbt_sql
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_dbt_sql
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_dbt_sql/prod
Validation OK!

@@ -19,16 +19,16 @@ targets:
# See also https://docs.databricks.com/dev-tools/bundles/deployment-modes.html.
mode: development
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

@@ -5,7 +5,7 @@ fixed:
type: databricks
prompts:
host:
default: $DATABRICKS_HOST
default: [DATABRICKS_HOST]
token:
hint: 'personal access token to use, dapiXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
hide_input: true
@@ -16,7 +16,7 @@ prompts:
hint: 'initial catalog'
default: main
schema:
hint: 'personal schema where dbt will build objects during development, example: $USERNAME'
hint: 'personal schema where dbt will build objects during development, example: [USERNAME]'
threads:
hint: 'threads to use during development, 1 or more'
type: 'int'

@@ -11,7 +11,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
tasks:

@@ -1,30 +1,30 @@
>>> $CLI bundle init default-python --config-file ./input.json --output-dir output
>>> [CLI] bundle init default-python --config-file ./input.json --output-dir output
Welcome to the default Python template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): $DATABRICKS_URL
Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_python' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_default_python
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_default_python
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_python/prod
Validation OK!

@@ -16,16 +16,16 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

@@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
tasks:
- task_key: notebook_task

@@ -23,7 +23,7 @@ setup(
# to ensure that changes to wheel package are picked up when used on all-purpose clusters
version=my_default_python.__version__ + "+" + local_version,
url="https://databricks.com",
author="$USERNAME",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},

@@ -1,32 +1,32 @@
>>> $CLI bundle init default-sql --config-file ./input.json --output-dir output
>>> [CLI] bundle init default-sql --config-file ./input.json --output-dir output
Welcome to the default SQL template for Databricks Asset Bundles!
A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
workspace_host: $DATABRICKS_URL
workspace_host: [DATABRICKS_URL]
✨ Your new project has been created in the 'my_default_sql' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: my_default_sql
Target: dev
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/dev
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/dev
Validation OK!
>>> $CLI bundle validate -t prod
>>> [CLI] bundle validate -t prod
Name: my_default_sql
Target: prod
Workspace:
Host: $DATABRICKS_URL
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/prod
Host: [DATABRICKS_URL]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/my_default_sql/prod
Validation OK!

@@ -25,7 +25,7 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
variables:
warehouse_id: f00dcafe
catalog: main
@@ -34,15 +34,15 @@ targets:
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
variables:
warehouse_id: f00dcafe
catalog: main
schema: default
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

@@ -12,7 +12,7 @@ resources:
email_notifications:
on_failure:
- $USERNAME
- [USERNAME]
parameters:
- name: catalog

@@ -1,28 +1,28 @@
>>> $CLI bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
>>> [CLI] bundle init experimental-jobs-as-code --config-file ./input.json --output-dir output
Welcome to (EXPERIMENTAL) "Jobs as code" template for Databricks Asset Bundles!
Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): $DATABRICKS_URL
Workspace to use (auto-detected, edit in 'my_jobs_as_code/databricks.yml'): [DATABRICKS_URL]
✨ Your new project has been created in the 'my_jobs_as_code' directory!
Please refer to the README.md file for "getting started" instructions.
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
>>> $CLI bundle validate -t dev --output json
Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION
>>> [CLI] bundle validate -t dev --output json
Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: [DEV_VERSION]
{
"jobs": {
"my_jobs_as_code_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"email_notifications": {
"on_failure": [
"$USERNAME"
"[USERNAME]"
]
},
"format": "MULTI_TASK",
@@ -40,19 +40,19 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
}
],
"max_concurrent_runs": 4,
"name": "[dev $USERNAME] my_jobs_as_code_job",
"name": "[dev [USERNAME]] my_jobs_as_code_job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {
"dev": "$USERNAME"
"dev": "[USERNAME]"
},
"tasks": [
{
"job_cluster_key": "job_cluster",
"notebook_task": {
"notebook_path": "/Workspace/Users/$USERNAME/.bundle/my_jobs_as_code/dev/files/src/notebook"
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/notebook"
},
"task_key": "notebook_task"
},

@@ -34,16 +34,16 @@ targets:
mode: development
default: true
workspace:
host: $DATABRICKS_URL
host: [DATABRICKS_URL]
prod:
mode: production
workspace:
host: $DATABRICKS_URL
# We explicitly specify /Workspace/Users/$USERNAME to make sure we only have a single copy.
root_path: /Workspace/Users/$USERNAME/.bundle/${bundle.name}/${bundle.target}
host: [DATABRICKS_URL]
# We explicitly specify /Workspace/Users/[USERNAME] to make sure we only have a single copy.
root_path: /Workspace/Users/[USERNAME]/.bundle/${bundle.name}/${bundle.target}
permissions:
- user_name: $USERNAME
- user_name: [USERNAME]
level: CAN_MANAGE
run_as:
user_name: $USERNAME
user_name: [USERNAME]

@@ -17,7 +17,7 @@ my_jobs_as_code_job = Job.from_dict(
},
"email_notifications": {
"on_failure": [
"$USERNAME",
"[USERNAME]",
],
},
"tasks": [

@@ -1,4 +1,4 @@
Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'...
Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world [TMPDIR]_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '[TMPDIR]_GPARENT/world-123456'...
fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com

@@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate --var a=one -o json
>>> errcode [CLI] bundle validate --var a=one -o json
{
"a": {
"default": "hello",
@@ -7,7 +7,7 @@
}
}
>>> errcode $CLI bundle validate --var a=one --var a=two
>>> errcode [CLI] bundle validate --var a=one --var a=two
Error: failed to assign two to a: variable has already been assigned value: one
Name: arg-repeat

@@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

@@ -3,7 +3,7 @@ Warning: Detected unresolved variables after 11 resolution rounds
Name: cycle
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/cycle/default
Found 1 warning

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/default/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

@@ -1,10 +1,10 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"

@@ -4,7 +4,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
[
{
"task_key": "test default"

@@ -3,8 +3,8 @@ Error: no value assigned to required variable a. Assignment can be done using "-
Name: empty${var.a}
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/empty${var.a}/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/empty${var.a}/default
Found 1 error

@@ -1,27 +1,27 @@
>>> $CLI bundle validate -t env-with-single-variable-override -o json
>>> [CLI] bundle validate -t env-with-single-variable-override -o json
"default-a dev-b"
>>> $CLI bundle validate -t env-with-two-variable-overrides -o json
>>> [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a prod-b"
>>> BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json
>>> BUNDLE_VAR_b=env-var-b [CLI] bundle validate -t env-with-two-variable-overrides -o json
"prod-a env-var-b"
>>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
>>> errcode [CLI] bundle validate -t env-missing-a-required-variable-assignment
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: test bundle
Target: env-missing-a-required-variable-assignment
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/test bundle/env-missing-a-required-variable-assignment
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test bundle/env-missing-a-required-variable-assignment
Found 1 error
Exit code: 1
>>> errcode $CLI bundle validate -t env-using-an-undefined-variable
>>> errcode [CLI] bundle validate -t env-using-an-undefined-variable
Error: variable c is not defined but is assigned a value
Name: test bundle
@@ -30,7 +30,7 @@ Found 1 error
Exit code: 1
>>> $CLI bundle validate -t env-overrides-lookup -o json
>>> [CLI] bundle validate -t env-overrides-lookup -o json
{
"a": "default-a",
"b": "prod-b",

@@ -1,6 +1,6 @@
=== variable file
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster",
"new_cluster": {
@@ -10,7 +10,7 @@
}
=== variable file and variable flag
>>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
>>> [CLI] bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@@ -20,7 +20,7 @@
}
=== variable file and environment variable
>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden [CLI] bundle validate -o json
{
"job_cluster_key": "mlops_stacks-cluster-overriden",
"new_cluster": {
@@ -30,7 +30,7 @@
}
=== variable has value in config file
>>> $CLI bundle validate -o json --target with_value
>>> [CLI] bundle validate -o json --target with_value
{
"job_cluster_key": "mlops_stacks-cluster-from-file",
"new_cluster": {
@@ -40,8 +40,8 @@
}
=== file cannot be parsed
>>> errcode $CLI bundle validate -o json --target invalid_json
Error: failed to parse variables file $TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
>>> errcode [CLI] bundle validate -o json --target invalid_json
Error: failed to parse variables file [TMPDIR]/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
Exit code: 1
@@ -54,8 +54,8 @@ Exit code: 1
}
=== file has wrong structure
>>> errcode $CLI bundle validate -o json --target wrong_file_structure
Error: failed to parse variables file $TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
>>> errcode [CLI] bundle validate -o json --target wrong_file_structure
Error: failed to parse variables file [TMPDIR]/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
Variables file must be a JSON object with the following format:
{"var1": "value1", "var2": "value2"}
@@ -71,7 +71,7 @@ Exit code: 1
}
=== file has variable that is complex but default is string
>>> errcode $CLI bundle validate -o json --target complex_to_string
>>> errcode [CLI] bundle validate -o json --target complex_to_string
Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type
@@ -85,7 +85,7 @@ Exit code: 1
}
=== file has variable that is string but default is complex
>>> errcode $CLI bundle validate -o json --target string_to_complex
>>> errcode [CLI] bundle validate -o json --target string_to_complex
Error: variable cluster is of type complex, but the value in the variable file is not a complex type
@@ -99,7 +99,7 @@ Exit code: 1
}
=== variable is required but it's not provided in the file
>>> errcode $CLI bundle validate -o json --target without_defaults
>>> errcode [CLI] bundle validate -o json --target without_defaults
Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

@@ -1,8 +1,4 @@
# Fix for windows
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\wrong_file_structure\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json'
[[Repls]]
Old = '\$TMPDIR\\.databricks\\bundle\\invalid_json\\variable-overrides.json'
New = '$$TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json'
Old = '\\'
New = '/'

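The simplified rule above replaces two path-specific Windows fixups with one global substitution: every backslash in captured output becomes a forward slash. Assuming the harness applies [[Repls]] rules as plain string substitutions, the effect is roughly this sketch (names are illustrative):

package replsketch

import "strings"

// normalizeSlashes is the effect of Old = '\\' / New = '/' on one output line.
func normalizeSlashes(line string) string {
	return strings.ReplaceAll(line, `\`, "/")
}
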
@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json
>>> [CLI] bundle validate -o json
{
"bundle": {
"environment": "prod",
@@ -11,7 +11,7 @@
"name": "git",
"target": "prod",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"sync": {
@@ -27,24 +27,24 @@
}
},
"workspace": {
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/artifacts",
"file_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/files",
"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/resources",
"root_path": "/Workspace/Users/$USERNAME/.bundle/git/prod",
"state_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/state"
"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/artifacts",
"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/files",
"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/resources",
"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod",
"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/prod/state"
}
}
>>> $CLI bundle validate
>>> [CLI] bundle validate
Name: git
Target: prod
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git/prod
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git/prod
Validation OK!
>>> $CLI bundle validate -o json -t dev
>>> [CLI] bundle validate -o json -t dev
{
"bundle": {
"environment": "dev",
@@ -56,7 +56,7 @@ Validation OK!
"name": "git",
"target": "dev",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"sync": {
@@ -72,19 +72,19 @@ Validation OK!
}
},
"workspace": {
"artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/artifacts",
"file_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/files",
"resource_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/resources",
"root_path": "/Workspace/Users/$USERNAME/.bundle/git/dev",
"state_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/state"
"artifact_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/artifacts",
"file_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/files",
"resource_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/resources",
"root_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev",
"state_path": "/Workspace/Users/[USERNAME]/.bundle/git/dev/state"
}
}
>>> $CLI bundle validate -t dev
>>> [CLI] bundle validate -t dev
Name: git
Target: dev
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/git/dev
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git/dev
Validation OK!

@@ -1,5 +1,5 @@
>>> errcode $CLI bundle validate -o json
>>> errcode [CLI] bundle validate -o json
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
{
@@ -25,7 +25,7 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara
}
Exit code: 1
>>> errcode $CLI bundle validate
>>> errcode [CLI] bundle validate
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
Name: host

@@ -7,7 +7,7 @@
},
"target": "dev",
"terraform": {
"exec_path": "$TERRAFORM"
"exec_path": "[TERRAFORM]"
}
},
"resources": {
@@ -15,7 +15,7 @@
"my_job": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Users/$USERNAME/path/to/root/state/metadata.json"
"metadata_file_path": "/Users/[USERNAME]/path/to/root/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
@@ -29,7 +29,7 @@
"existing_cluster_id": "500",
"python_wheel_task": {
"named_parameters": {
"conf-file": "/Users/$USERNAME/path/to/root/files/path/to/config.yaml"
"conf-file": "/Users/[USERNAME]/path/to/root/files/path/to/config.yaml"
}
},
"task_key": ""
@@ -46,17 +46,17 @@
"targets": null,
"variables": {
"workspace_root": {
"default": "/Users/$USERNAME",
"default": "/Users/[USERNAME]",
"description": "root directory in the Databricks workspace to store the asset bundle and associated artifacts",
"value": "/Users/$USERNAME"
"value": "/Users/[USERNAME]"
}
},
"workspace": {
"artifact_path": "/Users/$USERNAME/path/to/root/artifacts",
"file_path": "/Users/$USERNAME/path/to/root/files",
"artifact_path": "/Users/[USERNAME]/path/to/root/artifacts",
"file_path": "/Users/[USERNAME]/path/to/root/files",
"profile": "profile_name",
"resource_path": "/Users/$USERNAME/path/to/root/resources",
"root_path": "/Users/$USERNAME/path/to/root",
"state_path": "/Users/$USERNAME/path/to/root/state"
"resource_path": "/Users/[USERNAME]/path/to/root/resources",
"root_path": "/Users/[USERNAME]/path/to/root",
"state_path": "/Users/[USERNAME]/path/to/root/state"
}
}

@@ -20,7 +20,7 @@
"job1": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",

@@ -1,15 +1,15 @@
>>> BUNDLE_VAR_b=def $CLI bundle validate -o json
>>> BUNDLE_VAR_b=def [CLI] bundle validate -o json
"abc def"
>>> errcode $CLI bundle validate
>>> errcode [CLI] bundle validate
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: ${var.a} ${var.b}
Target: default
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/${var.a} ${var.b}/default
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/${var.a} ${var.b}/default
Found 1 error

@@ -1,5 +1,5 @@
>>> $CLI bundle validate -o json -t use-default-variable-values
>>> [CLI] bundle validate -o json -t use-default-variable-values
{
"pipelines": {
"my_pipeline": {
@@ -12,7 +12,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/use-default-variable-values/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/use-default-variable-values/state/metadata.json"
},
"name": "a_string",
"permissions": []
@@ -20,7 +20,7 @@
}
}
>>> $CLI bundle validate -o json -t override-string-variable
>>> [CLI] bundle validate -o json -t override-string-variable
{
"pipelines": {
"my_pipeline": {
@@ -33,7 +33,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-string-variable/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-string-variable/state/metadata.json"
},
"name": "overridden_string",
"permissions": []
@@ -41,7 +41,7 @@
}
}
>>> $CLI bundle validate -o json -t override-int-variable
>>> [CLI] bundle validate -o json -t override-int-variable
{
"pipelines": {
"my_pipeline": {
@@ -54,7 +54,7 @@
"continuous": true,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-int-variable/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-int-variable/state/metadata.json"
},
"name": "a_string",
"permissions": []
@@ -62,7 +62,7 @@
}
}
>>> $CLI bundle validate -o json -t override-both-bool-and-string-variables
>>> [CLI] bundle validate -o json -t override-both-bool-and-string-variables
{
"pipelines": {
"my_pipeline": {
@@ -75,7 +75,7 @@
"continuous": false,
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
},
"name": "overridden_string",
"permissions": []

@@ -15,7 +15,7 @@ import (
func StartCmdServer(t *testing.T) *testserver.Server {
server := testserver.New(t)
server.Handle("/", func(r *http.Request) (any, error) {
server.Handle("/", func(r *http.Request) (any, int) {
q := r.URL.Query()
args := strings.Split(q.Get("args"), " ")
@@ -40,7 +40,7 @@ func StartCmdServer(t *testing.T) *testserver.Server {
exitcode = 1
}
result["exitcode"] = exitcode
return result, nil
return result, http.StatusOK
})
return server
}

@@ -47,6 +47,9 @@ type TestConfig struct {
// Record the requests made to the server and write them as output to
// out.requests.txt
RecordRequests bool
// List of request headers to include when recording requests.
IncludeRequestHeaders []string
}
type ServerStub struct {
@@ -57,7 +60,8 @@ type ServerStub struct {
// The response body to return.
Response struct {
Body string
Body string
StatusCode int
}
}

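The two hunks above extend the acceptance-test config: a test can now list request headers to record and pin a stub's HTTP status. An illustrative mirror of the resulting structs, with field names taken from the diff and the surrounding code trimmed:

package configsketch

// ServerStub as extended by the diff: the stubbed response gains a status code.
type ServerStub struct {
	Pattern  string
	Response struct {
		Body       string
		StatusCode int // zero means unset; the server then responds 200 OK
	}
}

// TestConfig gains IncludeRequestHeaders alongside the existing RecordRequests.
type TestConfig struct {
	RecordRequests        bool
	IncludeRequestHeaders []string // headers to include in out.requests.txt
}
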
@@ -18,13 +18,13 @@ Exit code: 7
=== Capturing pwd
>>> python3 -c import os; print(os.getcwd())
$TMPDIR
[TMPDIR]
=== Capturing subdir
>>> mkdir -p subdir/a/b/c
>>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd())
$TMPDIR/subdir/a/b/c
[TMPDIR]/subdir/a/b/c
=== Custom output files - everything starting with out is captured and compared
>>> echo HELLO
@@ -35,5 +35,5 @@ CUSTOM_NUMBER_REGEX
123456
=== Testing --version
>>> $CLI --version
Databricks CLI v$DEV_VERSION
>>> [CLI] --version
Databricks CLI v[DEV_VERSION]

@@ -16,5 +16,5 @@ New = "CUSTOM_NUMBER_REGEX"
[[Repls]]
# Fix path with reverse slashes in the output for Windows.
Old = '\$TMPDIR\\subdir\\a\\b\\c'
New = '$$TMPDIR/subdir/a/b/c'
Old = 'TMPDIR]\\subdir\\a\\b\\c'
New = 'TMPDIR]/subdir/a/b/c'

@@ -11,7 +11,7 @@ import (
)
func AddHandlers(server *testserver.Server) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, int) {
return compute.ListPoliciesResponse{
Policies: []compute.Policy{
{
@ -23,10 +23,10 @@ func AddHandlers(server *testserver.Server) {
Name: "some-test-cluster-policy",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, int) {
return compute.ListInstancePools{
InstancePools: []compute.InstancePoolAndStats{
{
@ -34,10 +34,10 @@ func AddHandlers(server *testserver.Server) {
InstancePoolId: "1234",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, int) {
return compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -49,32 +49,32 @@ func AddHandlers(server *testserver.Server) {
ClusterId: "9876",
},
},
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, int) {
return iam.User{
Id: "1000012345",
UserName: "tester@databricks.com",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, int) {
return workspace.ObjectInfo{
ObjectId: 1001,
ObjectType: "DIRECTORY",
Path: "",
ResourceId: "1001",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, int) {
return catalog.MetastoreAssignment{
DefaultCatalogName: "main",
}, nil
}, http.StatusOK
})
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, int) {
return workspace.WorkspaceObjectPermissions{
ObjectId: "1001",
ObjectType: "DIRECTORY",
@ -88,10 +88,10 @@ func AddHandlers(server *testserver.Server) {
},
},
},
}, nil
}, http.StatusOK
})
server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, error) {
return "{}", nil
server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, int) {
return "{}", http.StatusOK
})
}
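
Returning the status code directly also lets a handler model failures without a separate error path. A hedged fragment (assumes a server *testserver.Server in scope, as in AddHandlers above; the payload mirrors the PERMISSION_DENIED stub in the TOML fixtures below):

// Sketch only: a stub that answers with 403 instead of 200.
server.Handle("POST /api/2.1/jobs/create", func(r *http.Request) (any, int) {
	return map[string]string{
		"error_code": "PERMISSION_DENIED",
		"message":    "Invalid access token.",
	}, http.StatusForbidden
})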

View File

@ -1,5 +1,5 @@
>>> $TERRAFORM init -no-color -get=false
>>> [TERRAFORM] init -no-color -get=false
Initializing the backend...
@ -35,12 +35,12 @@ If you ever set or change modules or backend configuration for Terraform,
rerun this command to reinitialize your working directory. If you forget, other
commands will detect it and remind you to do so if necessary.
>>> $TERRAFORM plan -no-color
>>> [TERRAFORM] plan -no-color
data.databricks_current_user.me: Reading...
data.databricks_current_user.me: Read complete after (redacted) [id=$USER.Id]
data.databricks_current_user.me: Read complete after (redacted) [id=[USERID]]
Changes to Outputs:
+ username = "$USERNAME"
+ username = "[USERNAME]"
You can apply this plan to save these new output values to the Terraform
state, without changing any real infrastructure.

View File

@ -0,0 +1 @@
{"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}

View File

@ -0,0 +1,5 @@
>>> [CLI] jobs create --json {"name":"abc"}
Error: Invalid access token.
Exit code: 1

View File

@ -0,0 +1 @@
trace $CLI jobs create --json '{"name":"abc"}'

View File

@ -0,0 +1,12 @@
LocalOnly = true # request recording currently does not work with cloud environment
RecordRequests = true
[[Server]]
Pattern = "POST /api/2.1/jobs/create"
Response.Body = '''
{
"error_code": "PERMISSION_DENIED",
"message": "Invalid access token."
}
'''
Response.StatusCode = 403

View File

@ -1 +1 @@
{"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}
{"headers":{"Authorization":"Bearer dapi1234","User-Agent":"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/jobs_create cmd-exec-id/[UUID] auth/pat"},"method":"POST","path":"/api/2.1/jobs/create","body":{"name":"abc"}}

View File

@ -1,5 +1,5 @@
>>> $CLI jobs create --json {"name":"abc"}
>>> [CLI] jobs create --json {"name":"abc"}
{
"job_id":1111
}

View File

@ -1,5 +1,6 @@
LocalOnly = true # request recording currently does not work with cloud environment
RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]
[[Server]]
Pattern = "POST /api/2.1/jobs/create"
@ -8,3 +9,19 @@ Response.Body = '''
"job_id": 1111
}
'''
[[Repls]]
Old = "(linux|darwin|windows)"
New = "[OS]"
[[Repls]]
Old = " upstream/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " upstream-version/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " cicd/[A-Za-z0-9.-]+"
New = ""

View File

@ -71,11 +71,11 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
continue
}
seen[rel] = true
if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" {
if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" && filepath.Ext(rel) != ".json" {
diags = diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: "Files in the 'include' configuration section must be YAML files.",
Detail: fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
Summary: "Files in the 'include' configuration section must be YAML or JSON files.",
Detail: fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
Locations: b.Config.GetLocations(fmt.Sprintf("include[%d]", i)),
})
continue
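
With this change, entries under the bundle's include key may point at JSON files as well as YAML. A hedged databricks.yml sketch (bundle name taken from the fixtures above; file names invented for illustration):

# 'include' now accepts .json entries alongside .yaml/.yml.
bundle:
  name: foobar

include:
  - resources/*.yml
  - resources/my_job.json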

View File

@ -101,9 +101,9 @@ var envCopy = []string{
// same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI.
"PATH",
// Include $AZURE_CONFIG_FILE in set of environment variables to pass along.
// Include $AZURE_CONFIG_DIR in set of environment variables to pass along.
// This is set in Azure DevOps by the AzureCLI@2 task.
"AZURE_CONFIG_FILE",
"AZURE_CONFIG_DIR",
// Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
// See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration

View File

@ -292,7 +292,7 @@ func TestInheritEnvVars(t *testing.T) {
t.Setenv("HOME", "/home/testuser")
t.Setenv("PATH", "/foo:/bar")
t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc")
t.Setenv("AZURE_CONFIG_FILE", "/tmp/foo/bar")
t.Setenv("AZURE_CONFIG_DIR", "/tmp/foo/bar")
ctx := context.Background()
env := map[string]string{}
@ -301,7 +301,7 @@ func TestInheritEnvVars(t *testing.T) {
assert.Equal(t, "/home/testuser", env["HOME"])
assert.Equal(t, "/foo:/bar", env["PATH"])
assert.Equal(t, "/tmp/config.tfrc", env["TF_CLI_CONFIG_FILE"])
assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_FILE"])
assert.Equal(t, "/tmp/foo/bar", env["AZURE_CONFIG_DIR"])
}
}

View File

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# Configuration reference
This article provides reference for keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).

View File

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# <DABS> resources
<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).

View File

@ -1,8 +1,9 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
# Configuration reference
This article provides reference for keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).

Some files were not shown because too many files have changed in this diff.