Merge remote-tracking branch 'origin' into acc-test-schema-dep

Shreyas Goenka 2025-01-16 16:21:40 +01:00
commit fa43a4de38
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
21 changed files with 191 additions and 14 deletions

@ -0,0 +1,12 @@
bundle:
  name: complex-cross-ref

variables:
  a:
    default:
      a_1: 500
      a_2: ${var.b.b_2}
  b:
    default:
      b_1: ${var.a.a_1}
      b_2: 2.5

@ -0,0 +1,22 @@
{
  "a": {
    "default": {
      "a_1": 500,
      "a_2": 2.5
    },
    "value": {
      "a_1": 500,
      "a_2": 2.5
    }
  },
  "b": {
    "default": {
      "b_1": 500,
      "b_2": 2.5
    },
    "value": {
      "b_1": 500,
      "b_2": 2.5
    }
  }
}

@ -0,0 +1 @@
$CLI bundle validate -o json | jq .variables
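
Note: the two defaults above reference each other only at the leaf level (${var.a.a_2} points at a literal in b, and ${var.b.b_1} points at a literal in a), so a single substitution pass already yields the values shown in output.json. The following is a minimal, hypothetical Go sketch of that one-pass substitution over an assumed flat lookup table; it is not the CLI's resolver.

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Assumed flat table of leaf values, mirroring databricks.yml above.
    leaves := map[string]string{
        "var.a.a_1": "500",
        "var.a.a_2": "${var.b.b_2}",
        "var.b.b_1": "${var.a.a_1}",
        "var.b.b_2": "2.5",
    }

    ref := regexp.MustCompile(`\$\{([^}]+)\}`)
    resolve := func(s string) string {
        return ref.ReplaceAllStringFunc(s, func(m string) string {
            key := ref.FindStringSubmatch(m)[1]
            if v, ok := leaves[key]; ok {
                return v
            }
            return m // leave unknown references untouched
        })
    }

    // One pass suffices here because every reference targets a literal leaf.
    fmt.Println("a_2 ->", resolve(leaves["var.a.a_2"])) // a_2 -> 2.5
    fmt.Println("b_1 ->", resolve(leaves["var.b.b_1"])) // b_1 -> 500
}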

@ -0,0 +1,7 @@
bundle:
  name: cycle

variables:
  a:
    default:
      hello: ${var.a}

@ -0,0 +1,9 @@
Warning: Detected unresolved variables after 11 resolution rounds

Name: cycle
Target: default
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/cycle/default

Found 1 warning

@ -0,0 +1 @@
$CLI bundle validate

@ -0,0 +1,10 @@
bundle:
  name: cycle

variables:
  a:
    default:
      hello: ${var.b}
  b:
    default:
      hello: ${var.a}

@ -0,0 +1,9 @@
Warning: Detected unresolved variables after 11 resolution rounds

Name: cycle
Target: default
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/cycle/default

Found 1 warning

@ -0,0 +1 @@
$CLI bundle validate

@ -0,0 +1,21 @@
bundle:
  name: complex-transitive

variables:
  catalog:
    default: hive_metastore
  spark_conf_1:
    default:
      "spark.databricks.sql.initial.catalog.name": ${var.catalog}
  spark_conf:
    default: ${var.spark_conf_1}
  etl_cluster_config:
    type: complex
    default:
      spark_version: 14.3.x-scala2.12
      runtime_engine: PHOTON
      spark_conf: ${var.spark_conf}

resources:
  clusters:
    my_cluster: ${var.etl_cluster_config}

@ -0,0 +1,3 @@
{
  "spark.databricks.sql.initial.catalog.name": "hive_metastore"
}

@ -0,0 +1,2 @@
# Currently, this incorrectly outputs the variable reference instead of the resolved value
$CLI bundle validate -o json | jq '.resources.clusters.my_cluster.spark_conf'
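
The transitive case is harder: ${var.spark_conf} points at ${var.spark_conf_1}, which itself embeds ${var.catalog}, so a single substitution pass can still leave a reference behind and a later pass has to pick it up. A simplified, hypothetical sketch of that repeated-pass behavior, using plain string substitution over an assumed variable map rather than the CLI's dyn-based resolver:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    // Assumed flat representation of the fixture above.
    vars := map[string]string{
        "var.catalog":      "hive_metastore",
        "var.spark_conf_1": `{"spark.databricks.sql.initial.catalog.name": "${var.catalog}"}`,
        "var.spark_conf":   "${var.spark_conf_1}",
    }

    ref := regexp.MustCompile(`\$\{([^}]+)\}`)

    // resolveOnce substitutes against a snapshot of the previous round and
    // reports whether anything changed.
    resolveOnce := func() bool {
        prev := map[string]string{}
        for k, v := range vars {
            prev[k] = v
        }
        updated := false
        for k, v := range vars {
            next := ref.ReplaceAllStringFunc(v, func(m string) string {
                key := ref.FindStringSubmatch(m)[1]
                if r, ok := prev[key]; ok && !ref.MatchString(r) {
                    return r // substitute only values that are already fully resolved
                }
                return m
            })
            if next != v {
                vars[k] = next
                updated = true
            }
        }
        return updated
    }

    // Round 1 resolves ${var.catalog} inside spark_conf_1; round 2 resolves ${var.spark_conf_1}.
    for round := 1; resolveOnce(); round++ {
        fmt.Printf("after round %d: var.spark_conf = %s\n", round, vars["var.spark_conf"])
    }
}

Under this model the chain settles after two rounds; the bounded loop added to resolve_variable_references.go further down applies the same stop-when-nothing-changes idea with a hard cap.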

@ -0,0 +1,8 @@
bundle:
  name: cycle

variables:
  a:
    default: ${var.b}
  b:
    default: ${var.a}

@ -0,0 +1,14 @@
Error: cycle detected in field resolution: variables.a.default -> var.b -> var.a -> var.b

{
  "a": {
    "default": "${var.b}",
    "value": "${var.b}"
  },
  "b": {
    "default": "${var.a}",
    "value": "${var.a}"
  }
}

Exit code: 1

@ -0,0 +1 @@
$CLI bundle validate -o json | jq .variables
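
Unlike the nested cases above, each default here is a whole-value reference, which is why this fixture fails with a cycle error rather than exhausting resolution rounds. A hypothetical sketch of one way to detect such a cycle by remembering the chain of references walked, producing a trace shaped like the error above (the map and starting point are assumptions for illustration, not the CLI's internals):

package main

import (
    "fmt"
    "strings"
)

func main() {
    // Assumed whole-value references: each variable's default is exactly one ${...}.
    next := map[string]string{
        "var.a": "var.b", // a's default is ${var.b}
        "var.b": "var.a", // b's default is ${var.a}
    }

    path := []string{"variables.a.default"} // where resolution starts
    seen := map[string]bool{}
    cur := "var.b" // variables.a.default points at ${var.b}

    for {
        path = append(path, cur)
        if seen[cur] {
            // Revisiting a reference means we are going in circles; report the chain walked.
            fmt.Println("Error: cycle detected in field resolution: " + strings.Join(path, " -> "))
            return
        }
        seen[cur] = true
        target, ok := next[cur]
        if !ok {
            return // reached a literal; resolution would have succeeded
        }
        cur = target
    }
}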

@ -14,7 +14,7 @@ type slowDeployMessage struct{}
 // See https://github.com/databricks/cli/pull/2144
 func (v *slowDeployMessage) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
     if len(b.Config.Resources.Apps) > 0 {
-        cmdio.LogString(ctx, "Databricks apps in your bundle can slow initial deployment as they wait for compute provisioning.")
+        cmdio.LogString(ctx, "Note: Databricks apps included in this bundle may increase initial deployment time due to compute provisioning.")
     }

     return nil

@ -3,6 +3,7 @@ package mutator
 import (
     "context"
     "errors"
+    "fmt"

     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config"
@ -13,15 +14,37 @@ import (
     "github.com/databricks/cli/libs/dyn/dynvar"
 )

+/*
+For pathological cases, output and time grow exponentially.
+
+On my laptop, timings for acceptance/bundle/variables/complex-cycle:
+rounds  time
+9       0.10s
+10      0.13s
+11      0.27s
+12      0.68s
+13      1.98s
+14      6.28s
+15      21.70s
+16      78.16s
+*/
+const maxResolutionRounds = 11
+
 type resolveVariableReferences struct {
     prefixes []string
     pattern  dyn.Pattern
     lookupFn func(dyn.Value, dyn.Path, *bundle.Bundle) (dyn.Value, error)
     skipFn   func(dyn.Value) bool
+
+    extraRounds int
 }

 func ResolveVariableReferences(prefixes ...string) bundle.Mutator {
-    return &resolveVariableReferences{prefixes: prefixes, lookupFn: lookup}
+    return &resolveVariableReferences{
+        prefixes:    prefixes,
+        lookupFn:    lookup,
+        extraRounds: maxResolutionRounds - 1,
+    }
 }

 func ResolveVariableReferencesInLookup() bundle.Mutator {
@ -86,7 +109,36 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
     varPath := dyn.NewPath(dyn.Key("var"))

     var diags diag.Diagnostics

+    maxRounds := 1 + m.extraRounds
+
+    for round := range maxRounds {
+        hasUpdates, newDiags := m.resolveOnce(b, prefixes, varPath)
+        diags = diags.Extend(newDiags)
+
+        if diags.HasError() {
+            break
+        }
+
+        if !hasUpdates {
+            break
+        }
+
+        if round >= maxRounds-1 {
+            diags = diags.Append(diag.Diagnostic{
+                Severity: diag.Warning,
+                Summary:  fmt.Sprintf("Detected unresolved variables after %d resolution rounds", round+1),
+                // Would be nice to include names of the variables there, but that would complicate things more
+            })
+            break
+        }
+    }
+
+    return diags
+}
+
+func (m *resolveVariableReferences) resolveOnce(b *bundle.Bundle, prefixes []dyn.Path, varPath dyn.Path) (bool, diag.Diagnostics) {
+    var diags diag.Diagnostics
+    hasUpdates := false
+
     err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
         // Synthesize a copy of the root that has all fields that are present in the type
         // but not set in the dynamic value set to their corresponding empty value.
@ -129,6 +181,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
                     if m.skipFn != nil && m.skipFn(v) {
                         return dyn.InvalidValue, dynvar.ErrSkipResolution
                     }
+                    hasUpdates = true
                     return m.lookupFn(normalized, path, b)
                 }
             }
@ -149,5 +202,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
     if err != nil {
         diags = diags.Extend(diag.FromErr(err))
     }
-    return diags
+
+    return hasUpdates, diags
 }
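
In short, the reworked Apply keeps calling resolveOnce until a pass makes no changes, stops early on errors, and appends the "Detected unresolved variables" warning when the final allowed round still produced updates. A stripped-down, hypothetical illustration of that control flow, with resolveOnce stubbed to always report an update (which is effectively what an unresolvable self-reference such as hello: ${var.a} does):

package main

import "fmt"

const maxResolutionRounds = 11 // same cap as the constant introduced above

// resolveOnce stands in for one substitution pass over the configuration; stubbed here
// to always report an update, as an unresolvable self-reference would.
func resolveOnce() (hasUpdates bool) { return true }

func main() {
    for round := 0; round < maxResolutionRounds; round++ {
        if !resolveOnce() {
            return // fixed point: nothing left to substitute
        }
        if round == maxResolutionRounds-1 {
            // Still updating on the last allowed round: warn instead of looping forever.
            fmt.Printf("Warning: Detected unresolved variables after %d resolution rounds\n", round+1)
        }
    }
}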

@ -1,3 +1,3 @@
 package schema

-const ProviderVersion = "1.62.0"
+const ProviderVersion = "1.63.0"

@ -13,8 +13,13 @@ type ResourceExternalLocationEncryptionDetails struct {
 type ResourceExternalLocation struct {
     AccessPoint string `json:"access_point,omitempty"`
+    BrowseOnly bool `json:"browse_only,omitempty"`
     Comment string `json:"comment,omitempty"`
+    CreatedAt int `json:"created_at,omitempty"`
+    CreatedBy string `json:"created_by,omitempty"`
+    CredentialId string `json:"credential_id,omitempty"`
     CredentialName string `json:"credential_name"`
+    Fallback bool `json:"fallback,omitempty"`
     ForceDestroy bool `json:"force_destroy,omitempty"`
     ForceUpdate bool `json:"force_update,omitempty"`
     Id string `json:"id,omitempty"`
@ -24,6 +29,8 @@ type ResourceExternalLocation struct {
     Owner string `json:"owner,omitempty"`
     ReadOnly bool `json:"read_only,omitempty"`
     SkipValidation bool `json:"skip_validation,omitempty"`
+    UpdatedAt int `json:"updated_at,omitempty"`
+    UpdatedBy string `json:"updated_by,omitempty"`
     Url string `json:"url"`
     EncryptionDetails *ResourceExternalLocationEncryptionDetails `json:"encryption_details,omitempty"`
 }

@ -21,7 +21,7 @@ type Root struct {
 const ProviderHost = "registry.terraform.io"
 const ProviderSource = "databricks/databricks"
-const ProviderVersion = "1.62.0"
+const ProviderVersion = "1.63.0"

 func NewRoot() *Root {
     return &Root{

@ -66,11 +66,6 @@ func Initialize() bundle.Mutator {
             "workspace",
             "variables",
         ),
-        mutator.ResolveVariableReferences(
-            "bundle",
-            "workspace",
-            "variables",
-        ),
         mutator.MergeJobClusters(),
         mutator.MergeJobParameters(),