mirror of https://github.com/databricks/cli.git
Compare commits
2 Commits: e220f9ddd6 ... e2c1d51d84

| Author | SHA1 | Date |
|---|---|---|
|  | e2c1d51d84 |  |
|  | bcab6ca37b |  |

CHANGELOG.md (27 additions)
```diff
@@ -1,5 +1,32 @@
 # Version changelog
 
+## [Release] Release v0.228.1
+
+Bundles:
+* Added listing cluster filtering for cluster lookups ([#1754](https://github.com/databricks/cli/pull/1754)).
+* Expand library globs relative to the sync root ([#1756](https://github.com/databricks/cli/pull/1756)).
+* Fixed generated YAML missing 'default' for empty values ([#1765](https://github.com/databricks/cli/pull/1765)).
+* Use periodic triggers in all templates ([#1739](https://github.com/databricks/cli/pull/1739)).
+* Use the friendly name of service principals when shortening their name ([#1770](https://github.com/databricks/cli/pull/1770)).
+* Fixed detecting full syntax variable override which includes type field ([#1775](https://github.com/databricks/cli/pull/1775)).
+
+Internal:
+* Pass copy of `dyn.Path` to callback function ([#1747](https://github.com/databricks/cli/pull/1747)).
+* Make bundle JSON schema modular with `` ([#1700](https://github.com/databricks/cli/pull/1700)).
+* Alias variables block in the `Target` struct ([#1748](https://github.com/databricks/cli/pull/1748)).
+* Add end to end integration tests for bundle JSON schema ([#1726](https://github.com/databricks/cli/pull/1726)).
+* Fix artifact upload integration tests ([#1767](https://github.com/databricks/cli/pull/1767)).
+
+API Changes:
+* Added `databricks quality-monitors regenerate-dashboard` command.
+
+OpenAPI commit d05898328669a3f8ab0c2ecee37db2673d3ea3f7 (2024-09-04)
+Dependency updates:
+* Bump golang.org/x/term from 0.23.0 to 0.24.0 ([#1757](https://github.com/databricks/cli/pull/1757)).
+* Bump golang.org/x/oauth2 from 0.22.0 to 0.23.0 ([#1761](https://github.com/databricks/cli/pull/1761)).
+* Bump golang.org/x/text from 0.17.0 to 0.18.0 ([#1759](https://github.com/databricks/cli/pull/1759)).
+* Bump github.com/databricks/databricks-sdk-go from 0.45.0 to 0.46.0 ([#1760](https://github.com/databricks/cli/pull/1760)).
+
 ## [Release] Release v0.228.0
 
 CLI:
```
```diff
@@ -409,18 +409,33 @@ func (r *Root) MergeTargetOverrides(name string) error {
 var variableKeywords = []string{"default", "lookup"}
 
 // isFullVariableOverrideDef checks if the given value is a full syntax variable override.
-// A full syntax variable override is a map with only one of the following
-// keys: "default", "lookup".
+// A full syntax variable override is a map with either 1 or 2 keys.
+// If it has 2 keys, the keys should be "default" and "type".
+// If it has 1 key, the key should be one of the following keys: "default", "lookup".
 func isFullVariableOverrideDef(v dyn.Value) bool {
     mv, ok := v.AsMap()
     if !ok {
         return false
     }
 
-    if mv.Len() != 1 {
+    // If the map has more than 2 keys, it is not a full variable override.
+    if mv.Len() > 2 {
         return false
     }
 
+    // If the map has 2 keys, one of them should be "default" and the other "type".
+    if mv.Len() == 2 {
+        if _, ok := mv.GetByString("type"); !ok {
+            return false
+        }
+
+        if _, ok := mv.GetByString("default"); !ok {
+            return false
+        }
+
+        return true
+    }
+
     for _, keyword := range variableKeywords {
         if _, ok := mv.GetByString(keyword); ok {
             return true
```
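To make the new branching concrete, here is a minimal, illustrative table-driven test for the predicate. It is a sketch, not part of this change: it assumes the function falls through to `return false` after the keyword loop (the hunk above is truncated before that point), that the test sits in the same package as the unexported `isFullVariableOverrideDef`, and that values are built with the `dyn.V` helper from `libs/dyn`; the test name and case labels are invented.

```go
package config

import (
	"testing"

	"github.com/databricks/cli/libs/dyn"
	"github.com/stretchr/testify/require"
)

// Sketch: each case mirrors a branch of the updated predicate.
// 1 key  -> must be "default" or "lookup".
// 2 keys -> must be exactly "default" and "type".
// Anything else is treated as a regular (short syntax) override.
func TestIsFullVariableOverrideDefSketch(t *testing.T) {
	cases := []struct {
		name     string
		value    dyn.Value
		expected bool
	}{
		{"default only", dyn.V(map[string]dyn.Value{"default": dyn.V("foo")}), true},
		{"lookup only", dyn.V(map[string]dyn.Value{"lookup": dyn.V("some-cluster")}), true},
		{"default and type", dyn.V(map[string]dyn.Value{"default": dyn.V("foo"), "type": dyn.V("string")}), true},
		{"type only", dyn.V(map[string]dyn.Value{"type": dyn.V("string")}), false},
		{"default and lookup", dyn.V(map[string]dyn.Value{"default": dyn.V("foo"), "lookup": dyn.V("bar")}), false},
		{"more than two keys", dyn.V(map[string]dyn.Value{"default": dyn.V("foo"), "type": dyn.V("string"), "lookup": dyn.V("bar")}), false},
		{"not a map", dyn.V("foo"), false},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			require.Equal(t, tc.expected, isFullVariableOverrideDef(tc.value))
		})
	}
}
```

The "default and type" case is the one this commit changes: previously any two-key map was rejected outright by the `mv.Len() != 1` check.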
```diff
@@ -88,3 +88,21 @@ func TestComplexVariablesOverrideWithMultipleFiles(t *testing.T) {
         require.Equalf(t, "false", cluster.NewCluster.SparkConf["spark.speculation"], "cluster: %v", cluster.JobClusterKey)
     }
 }
+
+func TestComplexVariablesOverrideWithFullSyntax(t *testing.T) {
+    b, diags := loadTargetWithDiags("variables/complex", "dev")
+    require.Empty(t, diags)
+
+    diags = bundle.Apply(context.Background(), b, bundle.Seq(
+        mutator.SetVariables(),
+        mutator.ResolveVariableReferencesInComplexVariables(),
+        mutator.ResolveVariableReferences(
+            "variables",
+        ),
+    ))
+    require.NoError(t, diags.Error())
+    require.Empty(t, diags)
+
+    complexvar := b.Config.Variables["complexvar"].Value
+    require.Equal(t, map[string]interface{}{"key1": "1", "key2": "2", "key3": "3"}, complexvar)
+}
```
```diff
@@ -35,6 +35,13 @@ variables:
       - jar: "/path/to/jar"
       - egg: "/path/to/egg"
       - whl: "/path/to/whl"
+  complexvar:
+    type: complex
+    description: "A complex variable"
+    default:
+      key1: "value1"
+      key2: "value2"
+      key3: "value3"
 
 
 targets:
@@ -49,3 +56,9 @@ targets:
         spark_conf:
           spark.speculation: false
          spark.databricks.delta.retentionDurationCheck.enabled: false
+      complexvar:
+        type: complex
+        default:
+          key1: "1"
+          key2: "2"
+          key3: "3"
```
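As a counterpart to `TestComplexVariablesOverrideWithFullSyntax`, a hypothetical companion test could check that a target which does not touch `complexvar` still resolves to the top-level default shown above. This is only a sketch under the assumption that the fixture's `default` target exists and leaves `complexvar` alone; the test name is invented, and it would live in the same test file, reusing its imports and the `loadTargetWithDiags` helper.

```go
// Hypothetical companion to the test above; assumes the "default" target
// in this fixture does not override complexvar.
func TestComplexVariablesDefaultTargetKeepsDefault(t *testing.T) {
	b, diags := loadTargetWithDiags("variables/complex", "default")
	require.Empty(t, diags)

	// Same mutator sequence as TestComplexVariablesOverrideWithFullSyntax.
	diags = bundle.Apply(context.Background(), b, bundle.Seq(
		mutator.SetVariables(),
		mutator.ResolveVariableReferencesInComplexVariables(),
		mutator.ResolveVariableReferences("variables"),
	))
	require.NoError(t, diags.Error())

	// With no target override, the top-level default should win.
	complexvar := b.Config.Variables["complexvar"].Value
	require.Equal(t, map[string]interface{}{"key1": "value1", "key2": "value2", "key3": "value3"}, complexvar)
}
```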