Shreyas Goenka 2025-01-24 14:25:53 +01:00
commit 24be18a2c7
30 changed files with 358 additions and 43 deletions

View File

@ -1,5 +1,25 @@
# Version changelog
## [Release] Release v0.239.1
CLI:
* Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
* Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
Bundles:
* Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
* Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
* Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
* fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
* Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
* Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
* Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
* Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
* Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
* Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
## [Release] Release v0.239.0
### New feature announcement

View File

@ -94,13 +94,13 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
}
t.Setenv("CLI", execPath)
repls.Set(execPath, "$CLI")
repls.SetPath(execPath, "$CLI")
// Make helper scripts available
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
tempHomeDir := t.TempDir()
repls.Set(tempHomeDir, "$TMPHOME")
repls.SetPath(tempHomeDir, "$TMPHOME")
t.Logf("$TMPHOME=%v", tempHomeDir)
// Prevent CLI from downloading terraform in each test:
@ -202,11 +202,6 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
})
}
// Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko
tmpDirEvalled, err1 := filepath.EvalSymlinks(tmpDir)
if err1 == nil && tmpDirEvalled != tmpDir {
repls.SetPathWithParents(tmpDirEvalled, "$TMPDIR")
}
repls.SetPathWithParents(tmpDir, "$TMPDIR")
scriptContents := readMergedScriptContents(t, dir)

View File

@ -0,0 +1,6 @@
bundle:
  name: non_yaml_in_includes

include:
  - test.py
  - resources/*.yml

View File

@ -0,0 +1,10 @@
Error: Files in the 'include' configuration section must be YAML files.
  in databricks.yml:5:4

The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.

Name: non_yaml_in_includes

Found 1 error

Exit code: 1

View File

@ -0,0 +1 @@
$CLI bundle validate

View File

@ -0,0 +1 @@
print("Hello world")

View File

@ -1,8 +1,6 @@
>>> $CLI bundle validate -t development -o json
Exit code: 0
>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

View File

@ -1,8 +1,6 @@
>>> $CLI bundle validate -t development -o json
Exit code: 0
>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

View File

@ -1,7 +1,5 @@
>>> errcode $CLI bundle validate --var a=one -o json
Exit code: 0
{
  "a": {
    "default": "hello",

View File

@ -1,4 +1,4 @@
Error: no value assigned to required variable a. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_a environment variable
Error: no value assigned to required variable a. Assignment can be done using "--var", by setting the BUNDLE_VAR_a environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: empty${var.a}
Target: default

View File

@ -9,7 +9,7 @@
"prod-a env-var-b"
>>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: test bundle
Target: env-missing-a-required-variable-assignment

View File

@ -0,0 +1,5 @@
{
  "cluster_key": {
    "node_type_id": "Standard_DS3_v2"
  }
}

View File

@ -0,0 +1,7 @@
{
  "cluster": {
    "node_type_id": "Standard_DS3_v2"
  },
  "cluster_key": "mlops_stacks-cluster",
  "cluster_workers": 2
}

View File

@ -0,0 +1,3 @@
{
  "cluster": "mlops_stacks-cluster"
}

View File

@ -0,0 +1,3 @@
{
  "cluster_key": "mlops_stacks-cluster-from-file"
}

View File

@ -0,0 +1,4 @@
{
  "cluster_key": "mlops_stacks-cluster",
  "cluster_workers": 2
}

View File

@ -0,0 +1 @@
!.databricks

View File

@ -0,0 +1,53 @@
bundle:
  name: TestResolveVariablesFromFile

variables:
  cluster:
    type: "complex"
  cluster_key:
  cluster_workers:

resources:
  jobs:
    job1:
      job_clusters:
        - job_cluster_key: ${var.cluster_key}
          new_cluster:
            node_type_id: "${var.cluster.node_type_id}"
            num_workers: ${var.cluster_workers}

targets:
  default:
    default: true
    variables:
      cluster_workers: 1
      cluster:
        node_type_id: "default"
      cluster_key: "default"

  without_defaults:

  complex_to_string:
    variables:
      cluster_workers: 1
      cluster:
        node_type_id: "default"
      cluster_key: "default"

  string_to_complex:
    variables:
      cluster_workers: 1
      cluster:
        node_type_id: "default"
      cluster_key: "default"

  wrong_file_structure:

  invalid_json:

  with_value:
    variables:
      cluster_workers: 1
      cluster:
        node_type_id: "default"
      cluster_key: cluster_key_value

View File

@ -0,0 +1,82 @@
=== variable file
>>> $CLI bundle validate -o json
{
  "job_cluster_key": "mlops_stacks-cluster",
  "new_cluster": {
    "node_type_id": "Standard_DS3_v2",
    "num_workers": 2
  }
}

=== variable file and variable flag
>>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
{
  "job_cluster_key": "mlops_stacks-cluster-overriden",
  "new_cluster": {
    "node_type_id": "Standard_DS3_v2",
    "num_workers": 2
  }
}

=== variable file and environment variable
>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
{
  "job_cluster_key": "mlops_stacks-cluster-overriden",
  "new_cluster": {
    "node_type_id": "Standard_DS3_v2",
    "num_workers": 2
  }
}

=== variable has value in config file
>>> $CLI bundle validate -o json --target with_value
{
  "job_cluster_key": "mlops_stacks-cluster-from-file",
  "new_cluster": {
    "node_type_id": "default",
    "num_workers": 1
  }
}

=== file has variable that is complex but default is string
>>> errcode $CLI bundle validate -o json --target complex_to_string
Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type

Exit code: 1
{
  "job_cluster_key": "${var.cluster_key}",
  "new_cluster": {
    "node_type_id": "${var.cluster.node_type_id}",
    "num_workers": "${var.cluster_workers}"
  }
}

=== file has variable that is string but default is complex
>>> errcode $CLI bundle validate -o json --target string_to_complex
Error: variable cluster is of type complex, but the value in the variable file is not a complex type

Exit code: 1
{
  "job_cluster_key": "${var.cluster_key}",
  "new_cluster": {
    "node_type_id": "${var.cluster.node_type_id}",
    "num_workers": "${var.cluster_workers}"
  }
}

=== variable is required but it's not provided in the file
>>> errcode $CLI bundle validate -o json --target without_defaults
Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

Exit code: 1
{
  "job_cluster_key": "${var.cluster_key}",
  "new_cluster": {
    "node_type_id": "${var.cluster.node_type_id}",
    "num_workers": "${var.cluster_workers}"
  }
}

View File

@ -0,0 +1,30 @@
cluster_expr=".resources.jobs.job1.job_clusters[0]"
# defaults from variable file, see .databricks/bundle/<target>/variable-overrides.json
title "variable file"
trace $CLI bundle validate -o json | jq $cluster_expr
title "variable file and variable flag"
trace $CLI bundle validate -o json --var="cluster_key=mlops_stacks-cluster-overriden" | jq $cluster_expr
title "variable file and environment variable"
trace BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json | jq $cluster_expr
title "variable has value in config file"
trace $CLI bundle validate -o json --target with_value | jq $cluster_expr
# title "file cannot be parsed"
# trace errcode $CLI bundle validate -o json --target invalid_json | jq $cluster_expr
# title "file has wrong structure"
# trace errcode $CLI bundle validate -o json --target wrong_file_structure | jq $cluster_expr
title "file has variable that is complex but default is string"
trace errcode $CLI bundle validate -o json --target complex_to_string | jq $cluster_expr
title "file has variable that is string but default is complex"
trace errcode $CLI bundle validate -o json --target string_to_complex | jq $cluster_expr
title "variable is required but it's not provided in the file"
trace errcode $CLI bundle validate -o json --target without_defaults | jq $cluster_expr

View File

@ -3,7 +3,7 @@
"abc def"
>>> errcode $CLI bundle validate
Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
Name: ${var.a} ${var.b}
Target: default

View File

@ -6,7 +6,9 @@ errcode() {
local exit_code=$?
# Re-enable 'set -e' if it was previously set
set -e
>&2 printf "\nExit code: $exit_code\n"
if [ $exit_code -ne 0 ]; then
>&2 printf "\nExit code: $exit_code\n"
fi
}
trace() {
@ -40,3 +42,8 @@ git-repo-init() {
git add databricks.yml
git commit -qm 'Add databricks.yml'
}
title() {
local label="$1"
printf "\n=== %s" "$label"
}

View File

@ -20,7 +20,7 @@ func StartServer(t *testing.T) *testserver.Server {
}
func AddHandlers(server *testserver.Server) {
server.Handle("/api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
return compute.ListPoliciesResponse{
Policies: []compute.Policy{
{
@ -35,7 +35,7 @@ func AddHandlers(server *testserver.Server) {
}, nil
})
server.Handle("/api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
return compute.ListInstancePools{
InstancePools: []compute.InstancePoolAndStats{
{
@ -46,7 +46,7 @@ func AddHandlers(server *testserver.Server) {
}, nil
})
server.Handle("/api/2.1/clusters/list", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) {
return compute.ListClustersResponse{
Clusters: []compute.ClusterDetails{
{
@ -61,13 +61,13 @@ func AddHandlers(server *testserver.Server) {
}, nil
})
server.Handle("/api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
return iam.User{
UserName: "tester@databricks.com",
}, nil
})
server.Handle("/api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
return workspace.ObjectInfo{
ObjectId: 1001,
ObjectType: "DIRECTORY",
@ -76,13 +76,13 @@ func AddHandlers(server *testserver.Server) {
}, nil
})
server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
return catalog.MetastoreAssignment{
DefaultCatalogName: "main",
}, nil
})
server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
return workspace.WorkspaceObjectPermissions{
ObjectId: "1001",
ObjectType: "DIRECTORY",
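The handler registrations above now include an HTTP method in the route pattern. Assuming the test server forwards these patterns to a Go 1.22-style http.ServeMux, a minimal standalone sketch of method-qualified routing looks like this (the endpoint and payload are copied from the handlers above, the wiring is illustrative):

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	mux := http.NewServeMux()
	// Since Go 1.22, a ServeMux pattern may begin with an HTTP method.
	// This handler matches only GET requests; other methods on the same
	// path automatically receive 405 Method Not Allowed.
	mux.HandleFunc("GET /api/2.0/preview/scim/v2/Me", func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		fmt.Fprint(w, `{"userName":"tester@databricks.com"}`)
	})
	_ = http.ListenAndServe("127.0.0.1:8080", mux)
}
```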

View File

@ -2,6 +2,7 @@ package loader
import (
"context"
"fmt"
"path/filepath"
"slices"
"strings"
@ -36,6 +37,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
// Maintain list of files in order of files being loaded.
// This is stored in the bundle configuration for observability.
var files []string
var diags diag.Diagnostics
// For each glob, find all files to load.
// Ordering of the list of globs is maintained in the output.
@ -60,7 +62,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
// Filter matches to ones we haven't seen yet.
var includes []string
for _, match := range matches {
for i, match := range matches {
rel, err := filepath.Rel(b.BundleRootPath, match)
if err != nil {
return diag.FromErr(err)
@ -69,9 +71,22 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
continue
}
seen[rel] = true
if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" {
diags = diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: "Files in the 'include' configuration section must be YAML files.",
Detail: fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
Locations: b.Config.GetLocations(fmt.Sprintf("include[%d]", i)),
})
continue
}
includes = append(includes, rel)
}
if len(diags) > 0 {
return diags
}
// Add matches to list of mutators to return.
slices.Sort(includes)
files = append(files, includes...)
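The check added above rejects non-YAML entries but keeps iterating, so every offending file matched by the 'include' globs is reported in a single run instead of failing on the first one. A standalone sketch of that accumulate-then-fail filtering (the helper name and error text are illustrative, not the bundle loader's API):

```go
package main

import (
	"fmt"
	"path/filepath"
)

// filterYAML keeps only .yml/.yaml entries and collects an error for every
// other match, so all offenders can be reported at once.
func filterYAML(matches []string) (includes []string, errs []error) {
	for _, m := range matches {
		if ext := filepath.Ext(m); ext != ".yaml" && ext != ".yml" {
			errs = append(errs, fmt.Errorf("file %s in the 'include' section is not a YAML file", m))
			continue
		}
		includes = append(includes, m)
	}
	return includes, errs
}

func main() {
	inc, errs := filterYAML([]string{"test.py", "resources/job.yml"})
	fmt.Println(inc, errs) // [resources/job.yml] plus one error for test.py
}
```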

View File

@ -3,11 +3,14 @@ package mutator
import (
"context"
"fmt"
"os"
"path/filepath"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/cli/libs/dyn/jsonloader"
"github.com/databricks/cli/libs/env"
)
@ -23,7 +26,11 @@ func (m *setVariables) Name() string {
return "SetVariables"
}
func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string) (dyn.Value, error) {
func getDefaultVariableFilePath(target string) string {
return ".databricks/bundle/" + target + "/variable-overrides.json"
}
func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string, fileDefault dyn.Value) (dyn.Value, error) {
// case: variable already has value initialized, so skip
if variable.HasValue() {
return v, nil
@ -49,6 +56,26 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
return v, nil
}
// case: Set the variable to the default value from the variable file
if fileDefault.Kind() != dyn.KindInvalid && fileDefault.Kind() != dyn.KindNil {
hasComplexType := variable.IsComplex()
hasComplexValue := fileDefault.Kind() == dyn.KindMap || fileDefault.Kind() == dyn.KindSequence
if hasComplexType && !hasComplexValue {
return dyn.InvalidValue, fmt.Errorf(`variable %s is of type complex, but the value in the variable file is not a complex type`, name)
}
if !hasComplexType && hasComplexValue {
return dyn.InvalidValue, fmt.Errorf(`variable %s is not of type complex, but the value in the variable file is a complex type`, name)
}
v, err := dyn.Set(v, "value", fileDefault)
if err != nil {
return dyn.InvalidValue, fmt.Errorf(`failed to assign default value from variable file to variable %s with error: %v`, name, err)
}
return v, nil
}
// case: Set the variable to its default value
if variable.HasDefault() {
vDefault, err := dyn.Get(v, "default")
@ -64,10 +91,43 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
}
// We should have had a value to set for the variable at this point.
return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done using "--var", by setting the %s environment variable, or in %s file`, name, bundleVarPrefix+name, getDefaultVariableFilePath("<target>"))
}
func readVariablesFromFile(b *bundle.Bundle) (dyn.Value, diag.Diagnostics) {
var diags diag.Diagnostics
filePath := filepath.Join(b.BundleRootPath, getDefaultVariableFilePath(b.Config.Bundle.Target))
if _, err := os.Stat(filePath); err != nil {
return dyn.InvalidValue, nil
}
f, err := os.ReadFile(filePath)
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to read variables file: %w", err))
}
val, err := jsonloader.LoadJSON(f, filePath)
if err != nil {
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse variables file %s: %w", filePath, err))
}
if val.Kind() != dyn.KindMap {
return dyn.InvalidValue, diags.Append(diag.Diagnostic{
Severity: diag.Error,
Summary: fmt.Sprintf("failed to parse variables file %s: invalid format", filePath),
Detail: "Variables file must be a JSON object with the following format:\n{\"var1\": \"value1\", \"var2\": \"value2\"}",
})
}
return val, nil
}
func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
defaults, diags := readVariablesFromFile(b)
if diags.HasError() {
return diags
}
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
return dyn.Map(v, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
name := p[1].Key()
@ -76,9 +136,10 @@ func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
return dyn.InvalidValue, fmt.Errorf(`variable "%s" is not defined`, name)
}
return setVariable(ctx, variable, v, name)
fileDefault, _ := dyn.Get(defaults, name)
return setVariable(ctx, variable, v, name, fileDefault)
}))
})
return diag.FromErr(err)
return diags.Extend(diag.FromErr(err))
}
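The readVariablesFromFile helper above treats a missing overrides file as "no file defaults" and only errors on unreadable or malformed content. A simplified standalone sketch of that pattern using only the standard library (the CLI itself uses its jsonloader and dyn packages, as shown in the diff):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
	"io/fs"
	"os"
)

// loadOverrides returns nil defaults when the file does not exist, and an
// error for any other read or parse failure. Unmarshalling into a map also
// rejects files whose top-level value is not a JSON object.
func loadOverrides(path string) (map[string]any, error) {
	data, err := os.ReadFile(path)
	if errors.Is(err, fs.ErrNotExist) {
		return nil, nil // no overrides file; fall back to the other sources
	}
	if err != nil {
		return nil, fmt.Errorf("failed to read variables file: %w", err)
	}
	var vals map[string]any
	if err := json.Unmarshal(data, &vals); err != nil {
		return nil, fmt.Errorf("failed to parse variables file %s: %w", path, err)
	}
	return vals, nil
}

func main() {
	vals, err := loadOverrides(".databricks/bundle/default/variable-overrides.json")
	fmt.Println(vals, err)
}
```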

View File

@ -25,7 +25,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
v, err = setVariable(context.Background(), v, &variable, "foo")
v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
require.NoError(t, err)
err = convert.ToTyped(&variable, v)
@ -43,7 +43,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
v, err = setVariable(context.Background(), v, &variable, "foo")
v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
require.NoError(t, err)
err = convert.ToTyped(&variable, v)
@ -65,7 +65,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
v, err = setVariable(context.Background(), v, &variable, "foo")
v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
require.NoError(t, err)
err = convert.ToTyped(&variable, v)
@ -90,7 +90,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
v, err = setVariable(context.Background(), v, &variable, "foo")
v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
require.NoError(t, err)
err = convert.ToTyped(&variable, v)
@ -107,8 +107,8 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
_, err = setVariable(context.Background(), v, &variable, "foo")
assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable")
_, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done using \"--var\", by setting the BUNDLE_VAR_foo environment variable, or in .databricks/bundle/<target>/variable-overrides.json file")
}
func TestSetVariablesMutator(t *testing.T) {
@ -157,6 +157,6 @@ func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) {
v, err := convert.FromTyped(variable, dyn.NilValue)
require.NoError(t, err)
_, err = setVariable(context.Background(), v, &variable, "foo")
_, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
assert.ErrorContains(t, err, "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo")
}

View File

@ -36,11 +36,12 @@ type Variable struct {
// This field stores the resolved value for the variable. The variables are
// resolved in the following priority order (from highest to lowest)
//
// 1. Command line flag. For example: `--var="foo=bar"`
// 2. Target variable. eg: BUNDLE_VAR_foo=bar
// 3. Default value as defined in the applicable environments block
// 4. Default value defined in variable definition
// 5. Throw error, since if no default value is defined, then the variable
// 1. Command line flag `--var="foo=bar"`
// 2. Environment variable. eg: BUNDLE_VAR_foo=bar
// 3. Load defaults from .databricks/bundle/<target>/variable-overrides.json
// 4. Default value as defined in the applicable targets block
// 5. Default value defined in variable definition
// 6. Throw error, since if no default value is defined, then the variable
// is required
Value VariableValue `json:"value,omitempty" bundle:"readonly"`
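The updated comment lists six sources checked in order. Purely as an illustration of that precedence (this is not the CLI's implementation; the maps stand in for the real configuration sources), the resolution reads as a chain of fallbacks:

```go
package main

import (
	"fmt"
	"os"
)

// resolveVariable checks each source in the documented priority order and
// returns the first value found; a required variable with no value is an error.
func resolveVariable(name string, flagVars, fileVars, targetDefaults, definitionDefaults map[string]string) (string, error) {
	if v, ok := flagVars[name]; ok { // 1. --var flag
		return v, nil
	}
	if v, ok := os.LookupEnv("BUNDLE_VAR_" + name); ok { // 2. environment variable
		return v, nil
	}
	if v, ok := fileVars[name]; ok { // 3. .databricks/bundle/<target>/variable-overrides.json
		return v, nil
	}
	if v, ok := targetDefaults[name]; ok { // 4. default in the applicable targets block
		return v, nil
	}
	if v, ok := definitionDefaults[name]; ok { // 5. default in the variable definition
		return v, nil
	}
	return "", fmt.Errorf("no value assigned to required variable %s", name) // 6. error
}

func main() {
	v, err := resolveVariable("cluster_key",
		map[string]string{},                             // no --var
		map[string]string{"cluster_key": "from-file"},   // overrides file
		map[string]string{"cluster_key": "from-target"}, // targets block
		map[string]string{},                             // no definition default
	)
	fmt.Println(v, err) // prints: from-file <nil>
}
```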

View File

@ -94,6 +94,18 @@ func trimQuotes(s string) string {
}
func (r *ReplacementsContext) SetPath(old, new string) {
if old != "" && old != "." {
// Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko
oldEvalled, err1 := filepath.EvalSymlinks(old)
if err1 == nil && oldEvalled != old {
r.SetPathNoEval(oldEvalled, new)
}
}
r.SetPathNoEval(old, new)
}
func (r *ReplacementsContext) SetPathNoEval(old, new string) {
r.Set(old, new)
if runtime.GOOS != "windows" {
@ -133,7 +145,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
r.Set(w.Config.Token, "$DATABRICKS_TOKEN")
r.Set(w.Config.Username, "$DATABRICKS_USERNAME")
r.Set(w.Config.Password, "$DATABRICKS_PASSWORD")
r.Set(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
r.SetPath(w.Config.Profile, "$DATABRICKS_CONFIG_PROFILE")
r.Set(w.Config.ConfigFile, "$DATABRICKS_CONFIG_FILE")
r.Set(w.Config.GoogleServiceAccount, "$DATABRICKS_GOOGLE_SERVICE_ACCOUNT")
r.Set(w.Config.GoogleCredentials, "$GOOGLE_CREDENTIALS")
@ -147,7 +159,7 @@ func PrepareReplacementsWorkspaceClient(t testutil.TestingT, r *ReplacementsCont
r.Set(w.Config.AzureEnvironment, "$ARM_ENVIRONMENT")
r.Set(w.Config.ClientID, "$DATABRICKS_CLIENT_ID")
r.Set(w.Config.ClientSecret, "$DATABRICKS_CLIENT_SECRET")
r.Set(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
r.SetPath(w.Config.DatabricksCliPath, "$DATABRICKS_CLI_PATH")
// This is set to words like "path" that happen too frequently
// r.Set(w.Config.AuthType, "$DATABRICKS_AUTH_TYPE")
}
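The SetPath change registers an extra replacement whenever filepath.EvalSymlinks resolves the given path to something different, which covers the Windows 8.3 short-name case mentioned in the comment above. A minimal sketch of that normalization step; the path is purely illustrative:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// On Windows, EvalSymlinks returns the resolved long-form path, so a
	// short name like C:\Users\DENIS~1.BIL expands to its full directory name.
	p := `C:\Users\DENIS~1.BIL`
	resolved, err := filepath.EvalSymlinks(p)
	if err == nil && resolved != p {
		fmt.Println("also register replacement for:", resolved)
	} else {
		fmt.Println("nothing to normalize for:", p)
	}
}
```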