Merge remote-tracking branch 'origin' into improve/json-schema

Shreyas Goenka 2024-09-05 14:16:14 +02:00
commit e55df7b207
12 changed files with 160 additions and 41 deletions

View File

@@ -1,5 +1,34 @@
 # Version changelog
 
+## [Release] Release v0.228.0
+
+CLI:
+* Do not error if we cannot prompt for a profile in `auth login` ([#1745](https://github.com/databricks/cli/pull/1745)).
+
+Bundles:
+
+As of this release, the CLI will show a prompt if there are configuration changes that lead to DLT pipeline recreation.
+Users can skip the prompt by specifying the `--auto-approve` flag.
+
+* Pass along to Terraform process ([#1734](https://github.com/databricks/cli/pull/1734)).
+* Add prompt when a pipeline recreation happens ([#1672](https://github.com/databricks/cli/pull/1672)).
+* Use materialized views in the default-sql template ([#1709](https://github.com/databricks/cli/pull/1709)).
+* Update templates to latest LTS DBR ([#1715](https://github.com/databricks/cli/pull/1715)).
+* Make lock optional in the JSON schema ([#1738](https://github.com/databricks/cli/pull/1738)).
+* Do not suppress normalisation diagnostics for resolving variables ([#1740](https://github.com/databricks/cli/pull/1740)).
+* Include a permissions section in all templates ([#1713](https://github.com/databricks/cli/pull/1713)).
+* Fixed complex variables not being correctly merged from include files ([#1746](https://github.com/databricks/cli/pull/1746)).
+* Fixed variable override in target with full variable syntax ([#1749](https://github.com/databricks/cli/pull/1749)).
+
+Internal:
+* Consider serverless clusters as compatible for Python wheel tasks ([#1733](https://github.com/databricks/cli/pull/1733)).
+* PythonMutator: explain missing package error ([#1736](https://github.com/databricks/cli/pull/1736)).
+* Add `dyn.Time` to box a timestamp with its original string value ([#1732](https://github.com/databricks/cli/pull/1732)).
+* Fix streaming of stdout, stdin, stderr in cobra test runner ([#1742](https://github.com/databricks/cli/pull/1742)).
+
+Dependency updates:
+* Bump github.com/Masterminds/semver/v3 from 3.2.1 to 3.3.0 ([#1741](https://github.com/databricks/cli/pull/1741)).
+
 ## [Release] Release v0.227.1
 
 CLI:

View File

@@ -33,12 +33,7 @@ func createGlobError(v dyn.Value, p dyn.Path, message string) diag.Diagnostic {
         Severity:  diag.Error,
         Summary:   fmt.Sprintf("%s: %s", source, message),
         Locations: []dyn.Location{v.Location()},
-        Paths: []dyn.Path{
-            // Hack to clone the path. This path copy is mutable.
-            // To be addressed in a later PR.
-            p.Append(),
-        },
+        Paths:     []dyn.Path{p},
     }
 }

View File

@@ -406,6 +406,30 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	return r.updateWithDynamicValue(root)
 }
 
+var variableKeywords = []string{"default", "lookup"}
+
+// isFullVariableOverrideDef checks if the given value is a full syntax variable override.
+// A full syntax variable override is a map with only one of the following
+// keys: "default", "lookup".
+func isFullVariableOverrideDef(v dyn.Value) bool {
+	mv, ok := v.AsMap()
+	if !ok {
+		return false
+	}
+
+	if mv.Len() != 1 {
+		return false
+	}
+
+	for _, keyword := range variableKeywords {
+		if _, ok := mv.GetByString(keyword); ok {
+			return true
+		}
+	}
+
+	return false
+}
+
 // rewriteShorthands performs lightweight rewriting of the configuration
 // tree where we allow users to write a shorthand and must rewrite to the full form.
 func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
@@ -433,30 +457,27 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
 		}, variable.Locations()), nil
 
 	case dyn.KindMap, dyn.KindSequence:
-		lookup, err := dyn.Get(variable, "lookup")
-		// If lookup is set, we don't want to rewrite the variable and return it as is.
-		if err == nil && lookup.Kind() != dyn.KindInvalid {
+		// If it's a full variable definition, leave it as is.
+		if isFullVariableOverrideDef(variable) {
 			return variable, nil
 		}
 
 		// Check if the original definition of the variable has a type field.
+		// If it has a type field, it means the shorthand is a value of a complex type.
 		// Type might not be found if the variable is overridden in a separate file
 		// and the configuration is not merged yet.
 		typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type")))
-		if err != nil {
-			return dyn.NewValue(map[string]dyn.Value{
-				"default": variable,
-			}, variable.Locations()), nil
-		}
-
-		if typeV.MustString() == "complex" {
+		if err == nil && typeV.MustString() == "complex" {
 			return dyn.NewValue(map[string]dyn.Value{
 				"type":    typeV,
 				"default": variable,
 			}, variable.Locations()), nil
 		}
 
-		return variable, nil
+		// If it's a shorthand, rewrite it to a full variable definition.
+		return dyn.NewValue(map[string]dyn.Value{
+			"default": variable,
+		}, variable.Locations()), nil
 
 	default:
 		return variable, nil
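
To make the two override syntaxes concrete: a full-syntax override is a one-key map whose key is a variable keyword, while any other map or sequence is treated as a shorthand value and wrapped. A minimal sketch, assuming only the `dyn.V` constructor used elsewhere in this diff; the values here are hypothetical, not taken from the test fixtures:

```go
package main

import "github.com/databricks/cli/libs/dyn"

func main() {
	// Full syntax: exactly one key, and it is one of the variable
	// keywords ("default" or "lookup"). isFullVariableOverrideDef
	// returns true and rewriteShorthands leaves the value untouched.
	full := dyn.V(map[string]dyn.Value{
		"default": dyn.V(map[string]dyn.Value{"num_workers": dyn.V(4)}),
	})

	// Shorthand: the map is the value itself, not a keyword wrapper.
	// isFullVariableOverrideDef returns false and rewriteShorthands
	// wraps it as {"default": ...}, adding "type" when the original
	// declaration is a complex variable.
	shorthand := dyn.V(map[string]dyn.Value{"num_workers": dyn.V(4)})

	_, _ = full, shorthand
}
```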

View File

@@ -3,7 +3,6 @@ package validate
 import (
     "context"
     "fmt"
-    "slices"
     "sort"
 
     "github.com/databricks/cli/bundle"
@@ -66,10 +65,7 @@ func (m *uniqueResourceKeys) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
         }
     }
 
-    // dyn.Path under the hood is a slice. The code that walks the configuration
-    // tree uses the same underlying slice to track the path as it walks
-    // the tree. So, we need to clone it here.
-    m.paths = append(m.paths, slices.Clone(p))
+    m.paths = append(m.paths, p)
     m.locations = append(m.locations, v.Locations()...)
 
     resourceMetadata[k] = m

View File

@@ -18,10 +18,8 @@ func matchError(p dyn.Path, l []dyn.Location, message string) diag.Diagnostic {
     return diag.Diagnostic{
         Severity: diag.Error,
         Summary:  message,
-        Paths: []dyn.Path{
-            p.Append(),
-        },
         Locations: l,
+        Paths:     []dyn.Path{p},
     }
 }

View File

@@ -76,7 +76,7 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error) {
     source = filepath.Join(b.RootPath, source)
 
     libs[source] = append(libs[source], configLocation{
-        configPath: p.Append(), // Hack to get the copy of path
+        configPath: p,
         location:   v.Location(),
     })

View File

@@ -81,9 +81,10 @@ func TestComplexVariablesOverrideWithMultipleFiles(t *testing.T) {
         ),
     ))
     require.NoError(t, diags.Error())
 
-    require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
-    require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
-    require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
-    require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
+    for _, cluster := range b.Config.Resources.Jobs["my_job"].JobClusters {
+        require.Equalf(t, "14.2.x-scala2.11", cluster.NewCluster.SparkVersion, "cluster: %v", cluster.JobClusterKey)
+        require.Equalf(t, "Standard_DS3_v2", cluster.NewCluster.NodeTypeId, "cluster: %v", cluster.JobClusterKey)
+        require.Equalf(t, 4, cluster.NewCluster.NumWorkers, "cluster: %v", cluster.JobClusterKey)
+        require.Equalf(t, "false", cluster.NewCluster.SparkConf["spark.speculation"], "cluster: %v", cluster.JobClusterKey)
+    }
 }

View File

@@ -5,13 +5,48 @@ resources:
   jobs:
     my_job:
       job_clusters:
-        - job_cluster_key: key
-          new_cluster: ${var.cluster}
+        - job_cluster_key: key1
+          new_cluster: ${var.cluster1}
+        - job_cluster_key: key2
+          new_cluster: ${var.cluster2}
+        - job_cluster_key: key3
+          new_cluster: ${var.cluster3}
+        - job_cluster_key: key4
+          new_cluster: ${var.cluster4}
 
 variables:
-  cluster:
+  cluster1:
+    type: complex
+    description: "A cluster definition"
+  cluster2:
+    type: complex
+    description: "A cluster definition"
+  cluster3:
+    type: complex
+    description: "A cluster definition"
+  cluster4:
     type: complex
     description: "A cluster definition"
 
 include:
   - ./variables/*.yml
+
+targets:
+  default:
+  dev:
+    variables:
+      cluster3:
+        spark_version: "14.2.x-scala2.11"
+        node_type_id: "Standard_DS3_v2"
+        num_workers: 4
+        spark_conf:
+          spark.speculation: false
+          spark.databricks.delta.retentionDurationCheck.enabled: false
+      cluster4:
+        default:
+          spark_version: "14.2.x-scala2.11"
+          node_type_id: "Standard_DS3_v2"
+          num_workers: 4
+          spark_conf:
+            spark.speculation: false
+            spark.databricks.delta.retentionDurationCheck.enabled: false

View File

@@ -2,7 +2,15 @@ targets:
   default:
   dev:
     variables:
-      cluster:
+      cluster1:
+        spark_version: "14.2.x-scala2.11"
+        node_type_id: "Standard_DS3_v2"
+        num_workers: 4
+        spark_conf:
+          spark.speculation: false
+          spark.databricks.delta.retentionDurationCheck.enabled: false
+      cluster2:
+        default:
         spark_version: "14.2.x-scala2.11"
         node_type_id: "Standard_DS3_v2"
         num_workers: 4

View File

@@ -70,7 +70,7 @@ type visitOptions struct {
 func visit(v Value, prefix Path, suffix Pattern, opts visitOptions) (Value, error) {
     if len(suffix) == 0 {
-        return opts.fn(prefix, v)
+        return opts.fn(slices.Clone(prefix), v)
     }
 
     // Initialize prefix if it is empty.

View File

@@ -21,7 +21,7 @@ func Foreach(fn MapFunc) MapFunc {
     for _, pair := range m.Pairs() {
         pk := pair.Key
         pv := pair.Value
-        nv, err := fn(append(p, Key(pk.MustString())), pv)
+        nv, err := fn(p.Append(Key(pk.MustString())), pv)
         if err != nil {
             return InvalidValue, err
         }
@@ -32,7 +32,7 @@ func Foreach(fn MapFunc) MapFunc {
     s := slices.Clone(v.MustSequence())
     for i, value := range s {
         var err error
-        s[i], err = fn(append(p, Index(i)), value)
+        s[i], err = fn(p.Append(Index(i)), value)
         if err != nil {
             return InvalidValue, err
         }
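
The rationale for replacing the bare `append(p, ...)` calls above: Go's `append` reuses a slice's backing array whenever spare capacity is available, so two child paths built from the same parent can silently alias each other, which is how callbacks ended up collecting identical `Paths` values. A minimal self-contained sketch of that failure mode, using plain string slices rather than `dyn.Path` (standard library only, not code from this repository):

```go
package main

import "fmt"

func main() {
	// A slice with spare capacity, standing in for a dyn.Path that is
	// reused while walking the configuration tree.
	p := make([]string, 1, 4)
	p[0] = "root"

	a := append(p, "foo") // fits within p's capacity: no reallocation
	b := append(p, "bar") // writes into the same backing array as a

	// Both print [root bar]: the second append overwrote a's element.
	fmt.Println(a, b)
}
```

Cloning the prefix before invoking the callback (the `slices.Clone` in `visit` above) guarantees each callback observes an independent path, which is exactly what the new `visit_test.go` below asserts.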

libs/dyn/visit_test.go (new file, 36 lines)
View File

@@ -0,0 +1,36 @@
+package dyn_test
+
+import (
+    "testing"
+
+    "github.com/databricks/cli/libs/dyn"
+    assert "github.com/databricks/cli/libs/dyn/dynassert"
+)
+
+func TestVisitCallbackPathCopy(t *testing.T) {
+    vin := dyn.V(map[string]dyn.Value{
+        "foo": dyn.V(42),
+        "bar": dyn.V(43),
+    })
+
+    var paths []dyn.Path
+
+    // The callback should receive a copy of the path.
+    // If the same underlying value is used, all collected paths will be the same.
+    // This test uses `MapByPattern` to collect all paths in the map.
+    // Visit itself doesn't have public functions and we exclusively use black-box testing for this package.
+    _, _ = dyn.MapByPattern(vin, dyn.NewPattern(dyn.AnyKey()), func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+        paths = append(paths, p)
+        return v, nil
+    })
+
+    // Verify that the paths retained their original values.
+    var strings []string
+    for _, p := range paths {
+        strings = append(strings, p.String())
+    }
+    assert.ElementsMatch(t, strings, []string{
+        "foo",
+        "bar",
+    })
+}