mirror of https://github.com/databricks/cli.git
Use dynamic configuration model in bundles (#1098)
## Changes

This is a fundamental change to how we load and process bundle configuration. We now depend on the configuration being represented as a `dyn.Value`. This representation is functionally equivalent to Go's `any` (it can hold a value of any type) and allows us to capture metadata associated with a value, such as where it was defined (e.g. file, line, and column). It also allows us to represent Go's zero values properly (e.g. empty string, integer equal to 0, or boolean false).

Using this representation allows the configuration model to deviate from the typed structure we have been relying on so far (`config.Root`). We need to deviate from these types when using variables for fields that are not themselves strings. For example, using `${var.num_workers}` for an integer `workers` field was impossible until now (though it is not yet implemented in this change).

The loader for a `dyn.Value` includes functionality to capture any and all type mismatches between the user-defined configuration and the expected types. These mismatches can be surfaced as validation errors in future PRs.

Given that many mutators expect the typed struct to be the source of truth, this change converts between the dynamic representation and the typed representation on mutator entry and exit. Existing mutators can continue to modify the typed representation, and these modifications are reflected in the dynamic representation (see `MarkMutatorEntry` and `MarkMutatorExit` in `bundle/config/root.go`).

Changes required by this PR:

* The existing interpolation package is removed in favor of `libs/dyn/dynvar`.
* Functionality to merge job clusters, job tasks, and pipeline clusters is broken out into dedicated mutators.

To be implemented later:

* Allow variable references for non-string types.
* Surface diagnostics about the user-provided configuration in the validation output.
* Some mutators use a resource's configuration file path to resolve related relative paths. These depend on `bundle/config/paths.Path` being set and populated through `ConfigureConfigFilePath`. Instead, they should interact with the dynamically typed configuration directly. Doing so also unlocks the ability to differentiate between base paths used within a single job (e.g. a task override with a relative path defined in a directory other than the base job's).

## Tests

* Existing unit tests pass (some have been modified to accommodate the changes).
* Integration tests pass.
This commit is contained in:

* parent: 5f59572cb3
* commit: 87dd46a3f8

Changed files: `NOTICE` (5 changed lines), plus the configuration and mutator sources below.
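To make the new pattern concrete before the diffs: below is a minimal sketch of a mutator written directly against the dynamic representation, composed only from API that appears in this commit (`b.Config.Mutate`, `dyn.Map`, `dyn.NewValue`, `dyn.Value.Location`). The mutator name and its behavior are hypothetical and purely illustrative, not part of this change.

```go
package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
)

// annotateName is a hypothetical mutator illustrating the pattern used by the
// mutators added in this commit: operate on the dyn.Value representation via
// b.Config.Mutate, and use dyn.Map to rewrite a single node in place.
type annotateName struct{}

func AnnotateName() bundle.Mutator { return &annotateName{} }

func (m *annotateName) Name() string { return "AnnotateName" }

func (m *annotateName) Apply(ctx context.Context, b *bundle.Bundle) error {
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		// Assumes "bundle.name" is set in the configuration.
		return dyn.Map(v, "bundle.name", func(v dyn.Value) (dyn.Value, error) {
			// Each dyn.Value carries its source location (file, line, column);
			// it is preserved when constructing the replacement value.
			return dyn.NewValue(v.MustString()+" (annotated)", v.Location()), nil
		})
	})
}
```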
### NOTICE

The `imdario/mergo` attribution is removed:

```diff
@@ -57,11 +57,6 @@ google/uuid - https://github.com/google/uuid
 Copyright (c) 2009,2014 Google Inc. All rights reserved.
 License - https://github.com/google/uuid/blob/master/LICENSE
 
-imdario/mergo - https://github.com/imdario/mergo
-Copyright (c) 2013 Dario Castañé. All rights reserved.
-Copyright (c) 2012 The Go Authors. All rights reserved.
-License - https://github.com/imdario/mergo/blob/master/LICENSE
-
 manifoldco/promptui - https://github.com/manifoldco/promptui
 Copyright (c) 2017, Arigato Machine Inc. All rights reserved.
 License - https://github.com/manifoldco/promptui/blob/master/LICENSE.md
```
### Artifacts: `SetConfigFilePath` becomes `ConfigureConfigFilePath`

```diff
@@ -10,9 +10,9 @@ import (
 type Artifacts map[string]*Artifact
 
-func (artifacts Artifacts) SetConfigFilePath(path string) {
+func (artifacts Artifacts) ConfigureConfigFilePath() {
 	for _, artifact := range artifacts {
-		artifact.ConfigFilePath = path
+		artifact.ConfigureConfigFilePath()
 	}
 }
```
### Git details: `Inferred` and `ActualBranch` gain JSON names

```diff
@@ -9,8 +9,8 @@ type Git struct {
 	BundleRootPath string `json:"bundle_root_path,omitempty" bundle:"readonly"`
 
 	// Inferred is set to true if the Git details were inferred and weren't set explicitly
-	Inferred bool `json:"-" bundle:"readonly"`
+	Inferred bool `json:"inferred,omitempty" bundle:"readonly"`
 
 	// The actual branch according to Git (may be different from the configured branch)
-	ActualBranch string `json:"-" bundle:"readonly"`
+	ActualBranch string `json:"actual_branch,omitempty" bundle:"readonly"`
 }
```
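The tag change matters because a field tagged `json:"-"` is invisible to any JSON-based machinery; presumably such a field also cannot be carried in the dynamic representation, which keys values by their JSON field names. A self-contained stdlib demonstration of the difference the tags make (the struct names are illustrative):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Mirrors the before/after tags on the Git struct in this commit.
	type before struct {
		Inferred bool `json:"-"`
	}
	type after struct {
		Inferred bool `json:"inferred,omitempty"`
	}

	b, _ := json.Marshal(before{Inferred: true})
	a, _ := json.Marshal(after{Inferred: true})
	fmt.Println(string(b)) // {} -- the field is dropped entirely
	fmt.Println(string(a)) // {"inferred":true}
}
```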
### Deleted: `package interpolation` (interpolation logic, 254 lines)

```go
package interpolation

import (
	"context"
	"errors"
	"fmt"
	"reflect"
	"regexp"
	"sort"
	"strings"

	"slices"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/variable"
	"golang.org/x/exp/maps"
)

const Delimiter = "."

// must start with a letter, may contain hyphens and underscores in the middle,
// but must end with an alphanumeric character
var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`)

type stringField struct {
	path string

	getter
	setter
}

func newStringField(path string, g getter, s setter) *stringField {
	return &stringField{
		path: path,

		getter: g,
		setter: s,
	}
}

func (s *stringField) dependsOn() []string {
	var out []string
	m := re.FindAllStringSubmatch(s.Get(), -1)
	for i := range m {
		out = append(out, m[i][1])
	}
	return out
}

func (s *stringField) interpolate(fns []LookupFunction, lookup map[string]string) {
	out := re.ReplaceAllStringFunc(s.Get(), func(s string) string {
		// Turn the whole match into the submatch.
		match := re.FindStringSubmatch(s)
		for _, fn := range fns {
			v, err := fn(match[1], lookup)
			if errors.Is(err, ErrSkipInterpolation) {
				continue
			}
			if err != nil {
				panic(err)
			}
			return v
		}

		// No substitution.
		return s
	})

	s.Set(out)
}

type accumulator struct {
	// all string fields in the bundle config
	strings map[string]*stringField

	// contains path -> resolved_string mapping for string fields in the config
	// The resolved strings will NOT contain any variable references that could
	// have been resolved, however there might still be references that cannot
	// be resolved
	memo map[string]string
}

// jsonFieldName returns the name in a field's `json` tag.
// Returns the empty string if it isn't set.
func jsonFieldName(sf reflect.StructField) string {
	tag, ok := sf.Tag.Lookup("json")
	if !ok {
		return ""
	}
	parts := strings.Split(tag, ",")
	if parts[0] == "-" {
		return ""
	}
	return parts[0]
}

func (a *accumulator) walkStruct(scope []string, rv reflect.Value) {
	num := rv.NumField()
	for i := 0; i < num; i++ {
		sf := rv.Type().Field(i)
		f := rv.Field(i)

		// Walk field with the same scope for anonymous (embedded) fields.
		if sf.Anonymous {
			a.walk(scope, f, anySetter{f})
			continue
		}

		// Skip unnamed fields.
		fieldName := jsonFieldName(rv.Type().Field(i))
		if fieldName == "" {
			continue
		}

		a.walk(append(scope, fieldName), f, anySetter{f})
	}
}

func (a *accumulator) walk(scope []string, rv reflect.Value, s setter) {
	// Dereference pointer.
	if rv.Type().Kind() == reflect.Pointer {
		// Skip nil pointers.
		if rv.IsNil() {
			return
		}
		rv = rv.Elem()
		s = anySetter{rv}
	}

	switch rv.Type().Kind() {
	case reflect.String:
		path := strings.Join(scope, Delimiter)
		a.strings[path] = newStringField(path, anyGetter{rv}, s)

		// register alias for variable value. `var.foo` would be the alias for
		// `variables.foo.value`
		if len(scope) == 3 && scope[0] == "variables" && scope[2] == "value" {
			aliasPath := strings.Join([]string{variable.VariableReferencePrefix, scope[1]}, Delimiter)
			a.strings[aliasPath] = a.strings[path]
		}
	case reflect.Struct:
		a.walkStruct(scope, rv)
	case reflect.Map:
		if rv.Type().Key().Kind() != reflect.String {
			panic("only support string keys in map")
		}
		keys := rv.MapKeys()
		for _, key := range keys {
			a.walk(append(scope, key.String()), rv.MapIndex(key), mapSetter{rv, key})
		}
	case reflect.Slice:
		n := rv.Len()
		name := scope[len(scope)-1]
		base := scope[:len(scope)-1]
		for i := 0; i < n; i++ {
			element := rv.Index(i)
			a.walk(append(base, fmt.Sprintf("%s[%d]", name, i)), element, anySetter{element})
		}
	}
}

// walk and gather all string fields in the config
func (a *accumulator) start(v any) {
	rv := reflect.ValueOf(v)
	if rv.Type().Kind() != reflect.Pointer {
		panic("expect pointer")
	}
	rv = rv.Elem()
	if rv.Type().Kind() != reflect.Struct {
		panic("expect struct")
	}

	a.strings = make(map[string]*stringField)
	a.memo = make(map[string]string)
	a.walk([]string{}, rv, nilSetter{})
}

// recursively interpolate variables in a depth-first manner
func (a *accumulator) Resolve(path string, seenPaths []string, fns ...LookupFunction) error {
	// return early if the path is already resolved
	if _, ok := a.memo[path]; ok {
		return nil
	}

	// fetch the string node to resolve
	field, ok := a.strings[path]
	if !ok {
		return fmt.Errorf("no value found for interpolation reference: ${%s}", path)
	}

	// return early if the string field has no variables to interpolate
	if len(field.dependsOn()) == 0 {
		a.memo[path] = field.Get()
		return nil
	}

	// resolve all variables referred to in the root string field
	for _, childFieldPath := range field.dependsOn() {
		// error if there is a loop in variable interpolation
		if slices.Contains(seenPaths, childFieldPath) {
			return fmt.Errorf("cycle detected in field resolution: %s", strings.Join(append(seenPaths, childFieldPath), " -> "))
		}

		// recursively resolve variables in the child fields
		err := a.Resolve(childFieldPath, append(seenPaths, childFieldPath), fns...)
		if err != nil {
			return err
		}
	}

	// interpolate root string once all variable references in it have been resolved
	field.interpolate(fns, a.memo)

	// record interpolated string in memo
	a.memo[path] = field.Get()
	return nil
}

// Interpolate all string fields in the config
func (a *accumulator) expand(fns ...LookupFunction) error {
	// sorting paths for stable order of iteration
	paths := maps.Keys(a.strings)
	sort.Strings(paths)

	// iterate over paths for all string fields in the config
	for _, path := range paths {
		err := a.Resolve(path, []string{path}, fns...)
		if err != nil {
			return err
		}
	}
	return nil
}

type interpolate struct {
	fns []LookupFunction
}

func (m *interpolate) expand(v any) error {
	a := accumulator{}
	a.start(v)
	return a.expand(m.fns...)
}

func Interpolate(fns ...LookupFunction) bundle.Mutator {
	return &interpolate{fns: fns}
}

func (m *interpolate) Name() string {
	return "Interpolate"
}

func (m *interpolate) Apply(_ context.Context, b *bundle.Bundle) error {
	return m.expand(&b.Config)
}
```
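For reference, the variable reference syntax this deleted package accepted is entirely determined by the regex above. A self-contained check, with the pattern copied verbatim from the file (the sample strings are illustrative):

```go
package main

import (
	"fmt"
	"regexp"
)

// The pattern from the deleted interpolation package, verbatim.
var re = regexp.MustCompile(`\$\{([a-zA-Z]+([-_]?[a-zA-Z0-9]+)*(\.[a-zA-Z]+([-_]?[a-zA-Z0-9]+)*)*)\}`)

func main() {
	for _, s := range []string{"${var.num_workers}", "${bundle.name}", "${-invalid}"} {
		m := re.FindStringSubmatch(s)
		if m == nil {
			fmt.Printf("%-22s -> no match\n", s)
			continue
		}
		// m[1] is the dotted path inside the braces.
		fmt.Printf("%-22s -> path %q\n", s, m[1])
	}
}
```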
### Deleted: `package interpolation` (interpolation tests, 251 lines)

```go
package interpolation

import (
	"testing"

	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/variable"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

type nest struct {
	X string            `json:"x"`
	Y *string           `json:"y"`
	Z map[string]string `json:"z"`
}

type foo struct {
	A string `json:"a"`
	B string `json:"b"`
	C string `json:"c"`

	// Pointer field
	D *string `json:"d"`

	// Struct field
	E nest `json:"e"`

	// Map field
	F map[string]string `json:"f"`
}

func expand(v any) error {
	a := accumulator{}
	a.start(v)
	return a.expand(DefaultLookup)
}

func TestInterpolationVariables(t *testing.T) {
	f := foo{
		A: "a",
		B: "${a}",
		C: "${a}",
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "a", f.A)
	assert.Equal(t, "a", f.B)
	assert.Equal(t, "a", f.C)
}

func TestInterpolationVariablesSpecialChars(t *testing.T) {
	type bar struct {
		A string `json:"a-b"`
		B string `json:"b_c"`
		C string `json:"c-_a"`
	}
	f := bar{
		A: "a",
		B: "${a-b}",
		C: "${a-b}",
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "a", f.A)
	assert.Equal(t, "a", f.B)
	assert.Equal(t, "a", f.C)
}

func TestInterpolationValidMatches(t *testing.T) {
	expectedMatches := map[string]string{
		"${hello_world.world_world}": "hello_world.world_world",
		"${helloworld.world-world}":  "helloworld.world-world",
		"${hello-world.world-world}": "hello-world.world-world",
	}
	for interpolationStr, expectedMatch := range expectedMatches {
		match := re.FindStringSubmatch(interpolationStr)
		assert.True(t, len(match) > 0,
			"Failed to match %s and find %s", interpolationStr, expectedMatch)
		assert.Equal(t, expectedMatch, match[1],
			"Failed to match the exact pattern %s and find %s", interpolationStr, expectedMatch)
	}
}

func TestInterpolationInvalidMatches(t *testing.T) {
	invalidMatches := []string{
		"${hello_world-.world_world}",   // the first segment must not end with a hyphen (-)
		"${hello_world-_.world_world}",  // the first segment must not end with an underscore (_)
		"${helloworld.world-world-}",    // the second segment must not end with a hyphen (-)
		"${helloworld-.world-world}",    // the first segment must not end with a hyphen (-)
		"${helloworld.-world-world}",    // the second segment must not start with a hyphen (-)
		"${-hello-world.-world-world-}", // segments must not start or end with a hyphen (-)
		"${_-_._-_.id}",                 // cannot use _- in sequence
		"${0helloworld.world-world}",    // the first segment must not start with a number
		"${helloworld.9world-world}",    // the second segment must not start with a number
		"${a-a.a-_a-a.id}",              // fails because of -_ in the second segment
		"${a-a.a--a-a.id}",              // fails because of -- in the second segment
	}
	for _, invalidMatch := range invalidMatches {
		match := re.FindStringSubmatch(invalidMatch)
		assert.True(t, len(match) == 0, "Should be invalid interpolation: %s", invalidMatch)
	}
}

func TestInterpolationWithPointers(t *testing.T) {
	fd := "${a}"
	f := foo{
		A: "a",
		D: &fd,
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "a", f.A)
	assert.Equal(t, "a", *f.D)
}

func TestInterpolationWithStruct(t *testing.T) {
	fy := "${e.x}"
	f := foo{
		A: "${e.x}",
		E: nest{
			X: "x",
			Y: &fy,
		},
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "x", f.A)
	assert.Equal(t, "x", f.E.X)
	assert.Equal(t, "x", *f.E.Y)
}

func TestInterpolationWithMap(t *testing.T) {
	f := foo{
		A: "${f.a}",
		F: map[string]string{
			"a": "a",
			"b": "${f.a}",
		},
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "a", f.A)
	assert.Equal(t, "a", f.F["a"])
	assert.Equal(t, "a", f.F["b"])
}

func TestInterpolationWithResursiveVariableReferences(t *testing.T) {
	f := foo{
		A: "a",
		B: "(${a})",
		C: "${a} ${b}",
	}

	err := expand(&f)
	require.NoError(t, err)

	assert.Equal(t, "a", f.A)
	assert.Equal(t, "(a)", f.B)
	assert.Equal(t, "a (a)", f.C)
}

func TestInterpolationVariableLoopError(t *testing.T) {
	d := "${b}"
	f := foo{
		A: "a",
		B: "${c}",
		C: "${d}",
		D: &d,
	}

	err := expand(&f)
	assert.ErrorContains(t, err, "cycle detected in field resolution: b -> c -> d -> b")
}

func TestInterpolationForVariables(t *testing.T) {
	foo := "abc"
	bar := "${var.foo} def"
	apple := "${var.foo} ${var.bar}"
	config := config.Root{
		Variables: map[string]*variable.Variable{
			"foo": {
				Value: &foo,
			},
			"bar": {
				Value: &bar,
			},
			"apple": {
				Value: &apple,
			},
		},
		Bundle: config.Bundle{
			Name: "${var.apple} ${var.foo}",
		},
	}

	err := expand(&config)
	assert.NoError(t, err)
	assert.Equal(t, "abc", *(config.Variables["foo"].Value))
	assert.Equal(t, "abc def", *(config.Variables["bar"].Value))
	assert.Equal(t, "abc abc def", *(config.Variables["apple"].Value))
	assert.Equal(t, "abc abc def abc", config.Bundle.Name)
}

func TestInterpolationLoopForVariables(t *testing.T) {
	foo := "${var.bar}"
	bar := "${var.foo}"
	config := config.Root{
		Variables: map[string]*variable.Variable{
			"foo": {
				Value: &foo,
			},
			"bar": {
				Value: &bar,
			},
		},
		Bundle: config.Bundle{
			Name: "${var.foo}",
		},
	}

	err := expand(&config)
	assert.ErrorContains(t, err, "cycle detected in field resolution: bundle.name -> var.foo -> var.bar -> var.foo")
}

func TestInterpolationInvalidVariableReference(t *testing.T) {
	foo := "abc"
	config := config.Root{
		Variables: map[string]*variable.Variable{
			"foo": {
				Value: &foo,
			},
		},
		Bundle: config.Bundle{
			Name: "${vars.foo}",
		},
	}

	err := expand(&config)
	assert.ErrorContains(t, err, "no value found for interpolation reference: ${vars.foo}")
}
```
### Deleted: `package interpolation` (lookup functions, 51 lines)

```go
package interpolation

import (
	"errors"
	"fmt"
	"slices"
	"strings"
)

// LookupFunction returns the value to rewrite a path expression to.
type LookupFunction func(path string, depends map[string]string) (string, error)

// ErrSkipInterpolation can be used to fall through from [LookupFunction].
var ErrSkipInterpolation = errors.New("skip interpolation")

// DefaultLookup looks up the specified path in the map.
// It returns an error if it doesn't exist.
func DefaultLookup(path string, lookup map[string]string) (string, error) {
	v, ok := lookup[path]
	if !ok {
		return "", fmt.Errorf("expected to find value for path: %s", path)
	}
	return v, nil
}

func pathPrefixMatches(prefix []string, path string) bool {
	parts := strings.Split(path, Delimiter)
	return len(parts) >= len(prefix) && slices.Compare(prefix, parts[0:len(prefix)]) == 0
}

// ExcludeLookupsInPath is a lookup function that skips lookups for the specified path.
func ExcludeLookupsInPath(exclude ...string) LookupFunction {
	return func(path string, lookup map[string]string) (string, error) {
		if pathPrefixMatches(exclude, path) {
			return "", ErrSkipInterpolation
		}

		return DefaultLookup(path, lookup)
	}
}

// IncludeLookupsInPath is a lookup function that limits lookups to the specified path.
func IncludeLookupsInPath(include ...string) LookupFunction {
	return func(path string, lookup map[string]string) (string, error) {
		if !pathPrefixMatches(include, path) {
			return "", ErrSkipInterpolation
		}

		return DefaultLookup(path, lookup)
	}
}
```
### Deleted: `package interpolation` (lookup tests, 81 lines)

```go
package interpolation

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

type interpolationFixture struct {
	A map[string]string `json:"a"`
	B map[string]string `json:"b"`
	C map[string]string `json:"c"`
}

func fixture() interpolationFixture {
	return interpolationFixture{
		A: map[string]string{
			"x": "1",
		},
		B: map[string]string{
			"x": "2",
		},
		C: map[string]string{
			"ax": "${a.x}",
			"bx": "${b.x}",
		},
	}
}

func TestExcludePath(t *testing.T) {
	tmp := fixture()
	m := interpolate{
		fns: []LookupFunction{
			ExcludeLookupsInPath("a"),
		},
	}

	err := m.expand(&tmp)
	require.NoError(t, err)

	assert.Equal(t, "1", tmp.A["x"])
	assert.Equal(t, "2", tmp.B["x"])
	assert.Equal(t, "${a.x}", tmp.C["ax"])
	assert.Equal(t, "2", tmp.C["bx"])
}

func TestIncludePath(t *testing.T) {
	tmp := fixture()
	m := interpolate{
		fns: []LookupFunction{
			IncludeLookupsInPath("a"),
		},
	}

	err := m.expand(&tmp)
	require.NoError(t, err)

	assert.Equal(t, "1", tmp.A["x"])
	assert.Equal(t, "2", tmp.B["x"])
	assert.Equal(t, "1", tmp.C["ax"])
	assert.Equal(t, "${b.x}", tmp.C["bx"])
}

func TestIncludePathMultiple(t *testing.T) {
	tmp := fixture()
	m := interpolate{
		fns: []LookupFunction{
			IncludeLookupsInPath("a"),
			IncludeLookupsInPath("b"),
		},
	}

	err := m.expand(&tmp)
	require.NoError(t, err)

	assert.Equal(t, "1", tmp.A["x"])
	assert.Equal(t, "2", tmp.B["x"])
	assert.Equal(t, "1", tmp.C["ax"])
	assert.Equal(t, "2", tmp.C["bx"])
}
```
### Deleted: `package interpolation` (getters and setters, 48 lines)

```go
package interpolation

import "reflect"

// String values in maps are not addressable and therefore not settable
// through Go's reflection mechanism. This interface solves this limitation
// by wrapping the setter differently for addressable values and map values.
type setter interface {
	Set(string)
}

type nilSetter struct{}

func (nilSetter) Set(_ string) {
	panic("nil setter")
}

type anySetter struct {
	rv reflect.Value
}

func (s anySetter) Set(str string) {
	s.rv.SetString(str)
}

type mapSetter struct {
	// map[string]string
	m reflect.Value

	// key
	k reflect.Value
}

func (s mapSetter) Set(str string) {
	s.m.SetMapIndex(s.k, reflect.ValueOf(str))
}

type getter interface {
	Get() string
}

type anyGetter struct {
	rv reflect.Value
}

func (g anyGetter) Get() string {
	return g.rv.String()
}
```
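The limitation the `setter` interface works around is easy to demonstrate with the standard library alone: a value retrieved from a map via reflection is not addressable, so writing it back requires `SetMapIndex` rather than `SetString`.

```go
package main

import (
	"fmt"
	"reflect"
)

func main() {
	m := map[string]string{"key": "before"}
	rv := reflect.ValueOf(m)
	k := reflect.ValueOf("key")

	// Map elements are copies: they cannot be set in place.
	fmt.Println(rv.MapIndex(k).CanSet()) // false

	// The mapSetter approach: write through the map itself.
	rv.SetMapIndex(k, reflect.ValueOf("after"))
	fmt.Println(m["key"]) // after
}
```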
### New mutator: `EnvironmentsToTargets` (new file, 63 lines)

```go
package mutator

import (
	"context"
	"fmt"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
)

type environmentsToTargets struct{}

func EnvironmentsToTargets() bundle.Mutator {
	return &environmentsToTargets{}
}

func (m *environmentsToTargets) Name() string {
	return "EnvironmentsToTargets"
}

func (m *environmentsToTargets) Apply(ctx context.Context, b *bundle.Bundle) error {
	// Short circuit if the "environments" key is not set.
	// This is the common case.
	if b.Config.Environments == nil {
		return nil
	}

	// The "environments" key is set; validate and rewrite it to "targets".
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		environments := v.Get("environments")
		targets := v.Get("targets")

		// Return an error if both "environments" and "targets" are set.
		if environments != dyn.NilValue && targets != dyn.NilValue {
			return dyn.NilValue, fmt.Errorf(
				"both 'environments' and 'targets' are specified; only 'targets' should be used: %s",
				environments.Location().String(),
			)
		}

		// Rewrite "environments" to "targets".
		if environments != dyn.NilValue && targets == dyn.NilValue {
			nv, err := dyn.Set(v, "targets", environments)
			if err != nil {
				return dyn.NilValue, err
			}
			// Drop the "environments" key.
			return dyn.Walk(nv, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
				switch len(p) {
				case 0:
					return v, nil
				case 1:
					if p[0] == dyn.Key("environments") {
						return v, dyn.ErrDrop
					}
				}
				return v, dyn.ErrSkip
			})
		}

		return v, nil
	})
}
```
### Tests for `EnvironmentsToTargets` (new file, 65 lines)

```go
package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/stretchr/testify/assert"
)

func TestEnvironmentsToTargetsWithBothDefined(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Environments: map[string]*config.Target{
				"name": {
					Mode: config.Development,
				},
			},
			Targets: map[string]*config.Target{
				"name": {
					Mode: config.Development,
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
	assert.ErrorContains(t, err, `both 'environments' and 'targets' are specified;`)
}

func TestEnvironmentsToTargetsWithEnvironmentsDefined(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Environments: map[string]*config.Target{
				"name": {
					Mode: config.Development,
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
	assert.NoError(t, err)
	assert.Len(t, b.Config.Environments, 0)
	assert.Len(t, b.Config.Targets, 1)
}

func TestEnvironmentsToTargetsWithTargetsDefined(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Targets: map[string]*config.Target{
				"name": {
					Mode: config.Development,
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.EnvironmentsToTargets())
	assert.NoError(t, err)
	assert.Len(t, b.Config.Environments, 0)
	assert.Len(t, b.Config.Targets, 1)
}
```
### Test updates: `TestExpandGlobPathsInPipelines`

The test no longer sets `paths.Paths.ConfigFilePath` on the resource directly; it uses the new `bundletest.SetLocation` helper to attach a location to the dynamic configuration instead:

```diff
@@ -8,8 +8,8 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/bundle/config/paths"
 	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/cli/bundle/internal/bundletest"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/pipelines"
 	"github.com/stretchr/testify/require"
```

```diff
@@ -42,9 +42,6 @@ func TestExpandGlobPathsInPipelines(t *testing.T) {
 		Resources: config.Resources{
 			Pipelines: map[string]*resources.Pipeline{
 				"pipeline": {
-					Paths: paths.Paths{
-						ConfigFilePath: filepath.Join(dir, "resource.yml"),
-					},
 					PipelineSpec: &pipelines.PipelineSpec{
 						Libraries: []pipelines.PipelineLibrary{
 							{
```

```diff
@@ -98,6 +95,8 @@ func TestExpandGlobPathsInPipelines(t *testing.T) {
 		},
 	}
 
+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	m := ExpandPipelineGlobPaths()
 	err := bundle.Apply(context.Background(), b, m)
 	require.NoError(t, err)
```
### New mutator: `MergeJobClusters` (new file, 42 lines)

```go
package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/merge"
)

type mergeJobClusters struct{}

func MergeJobClusters() bundle.Mutator {
	return &mergeJobClusters{}
}

func (m *mergeJobClusters) Name() string {
	return "MergeJobClusters"
}

func (m *mergeJobClusters) jobClusterKey(v dyn.Value) string {
	switch v.Kind() {
	case dyn.KindNil:
		return ""
	case dyn.KindString:
		return v.MustString()
	default:
		panic("job cluster key must be a string")
	}
}

func (m *mergeJobClusters) Apply(ctx context.Context, b *bundle.Bundle) error {
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		if v == dyn.NilValue {
			return v, nil
		}

		return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) {
			return dyn.Map(job, "job_clusters", merge.ElementsByKey("job_cluster_key", m.jobClusterKey))
		}))
	})
}
```
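The merge semantics themselves live in `libs/dyn/merge` and are not shown in this diff. Judging by the tests below, elements that share a key are merged in order (fields from later elements override earlier ones) while the ordering of first occurrences is preserved. A plain-Go sketch of that idea over string maps, illustrative only and not the actual `merge.ElementsByKey` implementation:

```go
package main

import "fmt"

// mergeByKey folds a list of records into one record per key, keeping the
// order in which each key first appears. Later fields override earlier ones.
func mergeByKey(elems []map[string]string, key string) []map[string]string {
	var order []string
	byKey := map[string]map[string]string{}
	for _, e := range elems {
		k := e[key]
		dst, ok := byKey[k]
		if !ok {
			dst = map[string]string{}
			byKey[k] = dst
			order = append(order, k)
		}
		for f, v := range e {
			dst[f] = v
		}
	}
	out := make([]map[string]string, 0, len(order))
	for _, k := range order {
		out = append(out, byKey[k])
	}
	return out
}

func main() {
	merged := mergeByKey([]map[string]string{
		{"job_cluster_key": "foo", "spark_version": "13.3.x-scala2.12", "node_type_id": "i3.xlarge"},
		{"job_cluster_key": "bar", "spark_version": "10.4.x-scala2.12"},
		{"job_cluster_key": "foo", "node_type_id": "i3.2xlarge"},
	}, "job_cluster_key")
	// "foo" keeps spark_version 13.3 but gets node_type_id i3.2xlarge; "bar" is untouched.
	fmt.Println(merged)
}
```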
### Tests for `MergeJobClusters` (new file, 105 lines)

```go
package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
)

func TestMergeJobClusters(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"foo": {
						JobSettings: &jobs.JobSettings{
							JobClusters: []jobs.JobCluster{
								{
									JobClusterKey: "foo",
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "13.3.x-scala2.12",
										NodeTypeId:   "i3.xlarge",
										NumWorkers:   2,
									},
								},
								{
									JobClusterKey: "bar",
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "10.4.x-scala2.12",
									},
								},
								{
									JobClusterKey: "foo",
									NewCluster: &compute.ClusterSpec{
										NodeTypeId: "i3.2xlarge",
										NumWorkers: 4,
									},
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters())
	assert.NoError(t, err)

	j := b.Config.Resources.Jobs["foo"]

	assert.Len(t, j.JobClusters, 2)
	assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey)
	assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey)

	// This job cluster was merged with a subsequent one.
	jc0 := j.JobClusters[0].NewCluster
	assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion)
	assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId)
	assert.Equal(t, 4, jc0.NumWorkers)

	// This job cluster was left untouched.
	jc1 := j.JobClusters[1].NewCluster
	assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion)
}

func TestMergeJobClustersWithNilKey(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"foo": {
						JobSettings: &jobs.JobSettings{
							JobClusters: []jobs.JobCluster{
								{
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "13.3.x-scala2.12",
										NodeTypeId:   "i3.xlarge",
										NumWorkers:   2,
									},
								},
								{
									NewCluster: &compute.ClusterSpec{
										NodeTypeId: "i3.2xlarge",
										NumWorkers: 4,
									},
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergeJobClusters())
	assert.NoError(t, err)
	assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
}
```
### New mutator: `MergeJobTasks` (new file, 42 lines)

```go
package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/merge"
)

type mergeJobTasks struct{}

func MergeJobTasks() bundle.Mutator {
	return &mergeJobTasks{}
}

func (m *mergeJobTasks) Name() string {
	return "MergeJobTasks"
}

func (m *mergeJobTasks) taskKeyString(v dyn.Value) string {
	switch v.Kind() {
	case dyn.KindNil:
		return ""
	case dyn.KindString:
		return v.MustString()
	default:
		panic("task key must be a string")
	}
}

func (m *mergeJobTasks) Apply(ctx context.Context, b *bundle.Bundle) error {
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		if v == dyn.NilValue {
			return v, nil
		}

		return dyn.Map(v, "resources.jobs", dyn.Foreach(func(job dyn.Value) (dyn.Value, error) {
			return dyn.Map(job, "tasks", merge.ElementsByKey("task_key", m.taskKeyString))
		}))
	})
}
```
### Tests for `MergeJobTasks` (new file, 117 lines)

```go
package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
)

func TestMergeJobTasks(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"foo": {
						JobSettings: &jobs.JobSettings{
							Tasks: []jobs.Task{
								{
									TaskKey: "foo",
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "13.3.x-scala2.12",
										NodeTypeId:   "i3.xlarge",
										NumWorkers:   2,
									},
									Libraries: []compute.Library{
										{Whl: "package1"},
									},
								},
								{
									TaskKey: "bar",
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "10.4.x-scala2.12",
									},
								},
								{
									TaskKey: "foo",
									NewCluster: &compute.ClusterSpec{
										NodeTypeId: "i3.2xlarge",
										NumWorkers: 4,
									},
									Libraries: []compute.Library{
										{Pypi: &compute.PythonPyPiLibrary{
											Package: "package2",
										}},
									},
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks())
	assert.NoError(t, err)

	j := b.Config.Resources.Jobs["foo"]

	assert.Len(t, j.Tasks, 2)
	assert.Equal(t, "foo", j.Tasks[0].TaskKey)
	assert.Equal(t, "bar", j.Tasks[1].TaskKey)

	// This task was merged with a subsequent one.
	task0 := j.Tasks[0]
	cluster := task0.NewCluster
	assert.Equal(t, "13.3.x-scala2.12", cluster.SparkVersion)
	assert.Equal(t, "i3.2xlarge", cluster.NodeTypeId)
	assert.Equal(t, 4, cluster.NumWorkers)
	assert.Len(t, task0.Libraries, 2)
	assert.Equal(t, task0.Libraries[0].Whl, "package1")
	assert.Equal(t, task0.Libraries[1].Pypi.Package, "package2")

	// This task was left untouched.
	task1 := j.Tasks[1].NewCluster
	assert.Equal(t, "10.4.x-scala2.12", task1.SparkVersion)
}

func TestMergeJobTasksWithNilKey(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"foo": {
						JobSettings: &jobs.JobSettings{
							Tasks: []jobs.Task{
								{
									NewCluster: &compute.ClusterSpec{
										SparkVersion: "13.3.x-scala2.12",
										NodeTypeId:   "i3.xlarge",
										NumWorkers:   2,
									},
								},
								{
									NewCluster: &compute.ClusterSpec{
										NodeTypeId: "i3.2xlarge",
										NumWorkers: 4,
									},
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergeJobTasks())
	assert.NoError(t, err)
	assert.Len(t, b.Config.Resources.Jobs["foo"].Tasks, 1)
}
```
### New mutator: `MergePipelineClusters` (new file, 45 lines)

```go
package mutator

import (
	"context"
	"strings"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/merge"
)

type mergePipelineClusters struct{}

func MergePipelineClusters() bundle.Mutator {
	return &mergePipelineClusters{}
}

func (m *mergePipelineClusters) Name() string {
	return "MergePipelineClusters"
}

func (m *mergePipelineClusters) clusterLabel(v dyn.Value) string {
	switch v.Kind() {
	case dyn.KindNil:
		// Note: the cluster label is optional and defaults to 'default'.
		// We therefore ALSO merge all clusters without a label.
		return "default"
	case dyn.KindString:
		return strings.ToLower(v.MustString())
	default:
		panic("cluster label must be a string")
	}
}

func (m *mergePipelineClusters) Apply(ctx context.Context, b *bundle.Bundle) error {
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		if v == dyn.NilValue {
			return v, nil
		}

		return dyn.Map(v, "resources.pipelines", dyn.Foreach(func(pipeline dyn.Value) (dyn.Value, error) {
			return dyn.Map(pipeline, "clusters", merge.ElementsByKey("label", m.clusterLabel))
		}))
	})
}
```
### Tests for `MergePipelineClusters` (new file, 125 lines)

```go
package mutator_test

import (
	"context"
	"strings"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/stretchr/testify/assert"
)

func TestMergePipelineClusters(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Pipelines: map[string]*resources.Pipeline{
					"foo": {
						PipelineSpec: &pipelines.PipelineSpec{
							Clusters: []pipelines.PipelineCluster{
								{
									NodeTypeId: "i3.xlarge",
									NumWorkers: 2,
									PolicyId:   "1234",
								},
								{
									Label:      "maintenance",
									NodeTypeId: "i3.2xlarge",
								},
								{
									NodeTypeId: "i3.2xlarge",
									NumWorkers: 4,
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
	assert.NoError(t, err)

	p := b.Config.Resources.Pipelines["foo"]

	assert.Len(t, p.Clusters, 2)
	assert.Equal(t, "default", p.Clusters[0].Label)
	assert.Equal(t, "maintenance", p.Clusters[1].Label)

	// The default cluster was merged with a subsequent one.
	pc0 := p.Clusters[0]
	assert.Equal(t, "i3.2xlarge", pc0.NodeTypeId)
	assert.Equal(t, 4, pc0.NumWorkers)
	assert.Equal(t, "1234", pc0.PolicyId)

	// The maintenance cluster was left untouched.
	pc1 := p.Clusters[1]
	assert.Equal(t, "i3.2xlarge", pc1.NodeTypeId)
}

func TestMergePipelineClustersCaseInsensitive(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Pipelines: map[string]*resources.Pipeline{
					"foo": {
						PipelineSpec: &pipelines.PipelineSpec{
							Clusters: []pipelines.PipelineCluster{
								{
									Label:      "default",
									NumWorkers: 2,
								},
								{
									Label:      "DEFAULT",
									NumWorkers: 4,
								},
							},
						},
					},
				},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
	assert.NoError(t, err)

	p := b.Config.Resources.Pipelines["foo"]
	assert.Len(t, p.Clusters, 1)

	// The default cluster was merged with a subsequent one.
	pc0 := p.Clusters[0]
	assert.Equal(t, "default", strings.ToLower(pc0.Label))
	assert.Equal(t, 4, pc0.NumWorkers)
}

func TestMergePipelineClustersNilPipelines(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Pipelines: nil,
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
	assert.NoError(t, err)
}

func TestMergePipelineClustersEmptyPipelines(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Pipelines: map[string]*resources.Pipeline{},
			},
		},
	}

	err := bundle.Apply(context.Background(), b, mutator.MergePipelineClusters())
	assert.NoError(t, err)
}
```
### Mutator ordering: `DefaultMutators`

`EnvironmentsToTargets` runs right after the root includes are processed, and `DefaultMutatorsForTarget` now speaks in terms of targets rather than environments:

```diff
@@ -10,12 +10,16 @@ func DefaultMutators() []bundle.Mutator {
 	return []bundle.Mutator{
 		scripts.Execute(config.ScriptPreInit),
 		ProcessRootIncludes(),
+		EnvironmentsToTargets(),
 		InitializeVariables(),
 		DefineDefaultTarget(),
 		LoadGitDetails(),
 	}
 }
 
-func DefaultMutatorsForTarget(env string) []bundle.Mutator {
-	return append(DefaultMutators(), SelectTarget(env))
+func DefaultMutatorsForTarget(target string) []bundle.Mutator {
+	return append(
+		DefaultMutators(),
+		SelectTarget(target),
+	)
 }
```
### Test updates: `TestOverrideDevelopment`

```diff
@@ -28,7 +28,9 @@ func TestOverrideDevelopment(t *testing.T) {
 					Name: "job1",
 					Tasks: []jobs.Task{
 						{
-							NewCluster: &compute.ClusterSpec{},
+							NewCluster: &compute.ClusterSpec{
+								SparkVersion: "14.2.x-scala2.12",
+							},
 						},
 						{
 							ExistingClusterId: "cluster2",
```
### Test updates: `TestAllResourcesRenamed`

The snapshot of the resources struct is now taken after the mutator runs:

```diff
@@ -274,12 +274,12 @@ func TestAllResourcesMocked(t *testing.T) {
 // Make sure that we at least rename all resources
 func TestAllResourcesRenamed(t *testing.T) {
 	b := mockBundle(config.Development)
-	resources := reflect.ValueOf(b.Config.Resources)
 
 	m := ProcessTargetMode()
 	err := bundle.Apply(context.Background(), b, m)
 	require.NoError(t, err)
 
+	resources := reflect.ValueOf(b.Config.Resources)
 	for i := 0; i < resources.NumField(); i++ {
 		field := resources.Field(i)
```
### New mutator: `ResolveVariableReferences` (new file, 81 lines)

```go
package mutator

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/dyn/dynvar"
)

type resolveVariableReferences struct {
	prefixes []string
}

func ResolveVariableReferences(prefixes ...string) bundle.Mutator {
	return &resolveVariableReferences{prefixes: prefixes}
}

func (*resolveVariableReferences) Name() string {
	return "ResolveVariableReferences"
}

func (m *resolveVariableReferences) Validate(ctx context.Context, b *bundle.Bundle) error {
	return nil
}

func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) error {
	prefixes := make([]dyn.Path, len(m.prefixes))
	for i, prefix := range m.prefixes {
		prefixes[i] = dyn.MustPathFromString(prefix)
	}

	// The path ${var.foo} is a shorthand for ${variables.foo.value}.
	// We rewrite it here to make the resolution logic simpler.
	varPath := dyn.NewPath(dyn.Key("var"))

	return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
		// Synthesize a copy of the root that has all fields that are present in the type
		// but not set in the dynamic value set to their corresponding empty value.
		// This enables users to interpolate variable references to fields that haven't
		// been explicitly set in the dynamic value.
		//
		// For example: ${bundle.git.origin_url} should resolve to an empty string
		// if a bundle isn't located in a Git repository (yet).
		//
		// This is consistent with the behavior prior to using the dynamic value system.
		//
		// We can ignore the diagnostics return value because we know that the dynamic value
		// has already been normalized when it was first loaded from the configuration file.
		//
		normalized, _ := convert.Normalize(b.Config, root, convert.IncludeMissingFields)
		lookup := func(path dyn.Path) (dyn.Value, error) {
			// Future opportunity: if we look up this path in both the given root
			// and the synthesized root, we know if it was explicitly set or implied to be empty.
			// Then we can emit a warning if it was not explicitly set.
			return dyn.GetByPath(normalized, path)
		}

		// Resolve variable references in all values.
		return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) {
			// Rewrite the shorthand path ${var.foo} into ${variables.foo.value}.
			if path.HasPrefix(varPath) && len(path) == 2 {
				path = dyn.NewPath(
					dyn.Key("variables"),
					path[1],
					dyn.Key("value"),
				)
			}

			// Perform resolution only if the path starts with one of the specified prefixes.
			for _, prefix := range prefixes {
				if path.HasPrefix(prefix) {
					return lookup(path)
				}
			}

			return dyn.InvalidValue, dynvar.ErrSkipResolution
		})
	})
}
```
@ -0,0 +1,97 @@
|
||||||
|
package mutator
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/databricks/cli/bundle"
|
||||||
|
"github.com/databricks/cli/bundle/config"
|
||||||
|
"github.com/databricks/cli/bundle/config/resources"
|
||||||
|
"github.com/databricks/cli/bundle/config/variable"
|
||||||
|
"github.com/databricks/databricks-sdk-go/service/jobs"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestResolveVariableReferences(t *testing.T) {
|
||||||
|
b := &bundle.Bundle{
|
||||||
|
Config: config.Root{
|
||||||
|
Bundle: config.Bundle{
|
||||||
|
Name: "example",
|
||||||
|
},
|
||||||
|
Workspace: config.Workspace{
|
||||||
|
RootPath: "${bundle.name}/bar",
|
||||||
|
FilePath: "${workspace.root_path}/baz",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply with an invalid prefix. This should not change the workspace root path.
|
||||||
|
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath)
|
||||||
|
require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath)
|
||||||
|
|
||||||
|
// Apply with a valid prefix. This should change the workspace root path.
|
||||||
|
err = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
|
||||||
|
require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
|
||||||
|
s := func(s string) *string {
|
||||||
|
return &s
|
||||||
|
}
|
||||||
|
|
||||||
|
b := &bundle.Bundle{
|
||||||
|
Config: config.Root{
|
||||||
|
Bundle: config.Bundle{
|
||||||
|
Name: "example",
|
||||||
|
},
|
||||||
|
Workspace: config.Workspace{
|
||||||
|
RootPath: "${bundle.name}/${var.foo}",
|
||||||
|
},
|
||||||
|
Variables: map[string]*variable.Variable{
|
||||||
|
"foo": {
|
||||||
|
Value: s("bar"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply with a valid prefix. This should change the workspace root path.
|
||||||
|
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "variables"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestResolveVariableReferencesToEmptyFields(t *testing.T) {
|
||||||
|
b := &bundle.Bundle{
|
||||||
|
Config: config.Root{
|
||||||
|
Bundle: config.Bundle{
|
||||||
|
Name: "example",
|
||||||
|
Git: config.Git{
|
||||||
|
Branch: "",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
Resources: config.Resources{
|
||||||
|
Jobs: map[string]*resources.Job{
|
||||||
|
"job1": {
|
||||||
|
JobSettings: &jobs.JobSettings{
|
||||||
|
Tags: map[string]string{
|
||||||
|
"git_branch": "${bundle.git.branch}",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply for the bundle prefix.
|
||||||
|
err := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle"))
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
// The job settings should have been interpolated to an empty string.
|
||||||
|
require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"])
|
||||||
|
}
|

@@ -0,0 +1,58 @@
package mutator

import (
	"context"
	"path/filepath"

	"github.com/databricks/cli/bundle"

	"github.com/databricks/cli/libs/dyn"
)

type rewriteSyncPaths struct{}

func RewriteSyncPaths() bundle.Mutator {
	return &rewriteSyncPaths{}
}

func (m *rewriteSyncPaths) Name() string {
	return "RewriteSyncPaths"
}

// makeRelativeTo returns a dyn.MapFunc that joins the relative path
// of the file it was defined in w.r.t. the bundle root path, with
// the contents of the string node.
//
// For example:
//   - The bundle root is /foo
//   - The configuration file that defines the string node is at /foo/bar/baz.yml
//   - The string node contains "somefile.*"
//
// Then the resulting value will be "bar/somefile.*".
func (m *rewriteSyncPaths) makeRelativeTo(root string) dyn.MapFunc {
	return func(v dyn.Value) (dyn.Value, error) {
		dir := filepath.Dir(v.Location().File)
		rel, err := filepath.Rel(root, dir)
		if err != nil {
			return dyn.NilValue, err
		}

		return dyn.NewValue(filepath.Join(rel, v.MustString()), v.Location()), nil
	}
}

func (m *rewriteSyncPaths) Apply(ctx context.Context, b *bundle.Bundle) error {
	return b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
		return dyn.Map(v, "sync", func(v dyn.Value) (nv dyn.Value, err error) {
			v, err = dyn.Map(v, "include", dyn.Foreach(m.makeRelativeTo(b.Config.Path)))
			if err != nil {
				return dyn.NilValue, err
			}
			v, err = dyn.Map(v, "exclude", dyn.Foreach(m.makeRelativeTo(b.Config.Path)))
			if err != nil {
				return dyn.NilValue, err
			}
			return v, nil
		})
	})
}
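
As a standalone check of the path arithmetic in makeRelativeTo for the example in the comment above (bundle root /foo, config file /foo/bar/baz.yml), using only the standard library:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	dir := filepath.Dir("/foo/bar/baz.yml")       // "/foo/bar"
	rel, _ := filepath.Rel("/foo", dir)           // "bar"
	fmt.Println(filepath.Join(rel, "somefile.*")) // "bar/somefile.*"
}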

@@ -0,0 +1,103 @@
package mutator_test

import (
	"context"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/internal/bundletest"
	"github.com/stretchr/testify/assert"
)

func TestRewriteSyncPathsRelative(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Path: ".",
			Sync: config.Sync{
				Include: []string{
					"foo",
					"bar",
				},
				Exclude: []string{
					"baz",
					"qux",
				},
			},
		},
	}

	bundletest.SetLocation(b, "sync.include[0]", "./file.yml")
	bundletest.SetLocation(b, "sync.include[1]", "./a/file.yml")
	bundletest.SetLocation(b, "sync.exclude[0]", "./a/b/file.yml")
	bundletest.SetLocation(b, "sync.exclude[1]", "./a/b/c/file.yml")

	err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
	assert.NoError(t, err)

	assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0])
	assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1])
	assert.Equal(t, filepath.Clean("a/b/baz"), b.Config.Sync.Exclude[0])
	assert.Equal(t, filepath.Clean("a/b/c/qux"), b.Config.Sync.Exclude[1])
}

func TestRewriteSyncPathsAbsolute(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Path: "/tmp/dir",
			Sync: config.Sync{
				Include: []string{
					"foo",
					"bar",
				},
				Exclude: []string{
					"baz",
					"qux",
				},
			},
		},
	}

	bundletest.SetLocation(b, "sync.include[0]", "/tmp/dir/file.yml")
	bundletest.SetLocation(b, "sync.include[1]", "/tmp/dir/a/file.yml")
	bundletest.SetLocation(b, "sync.exclude[0]", "/tmp/dir/a/b/file.yml")
	bundletest.SetLocation(b, "sync.exclude[1]", "/tmp/dir/a/b/c/file.yml")

	err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
	assert.NoError(t, err)

	assert.Equal(t, filepath.Clean("foo"), b.Config.Sync.Include[0])
	assert.Equal(t, filepath.Clean("a/bar"), b.Config.Sync.Include[1])
	assert.Equal(t, filepath.Clean("a/b/baz"), b.Config.Sync.Exclude[0])
	assert.Equal(t, filepath.Clean("a/b/c/qux"), b.Config.Sync.Exclude[1])
}

func TestRewriteSyncPathsErrorPaths(t *testing.T) {
	t.Run("no sync block", func(t *testing.T) {
		b := &bundle.Bundle{
			Config: config.Root{
				Path: ".",
			},
		}

		err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
		assert.NoError(t, err)
	})

	t.Run("empty include/exclude blocks", func(t *testing.T) {
		b := &bundle.Bundle{
			Config: config.Root{
				Path: ".",
				Sync: config.Sync{
					Include: []string{},
					Exclude: []string{},
				},
			},
		}

		err := bundle.Apply(context.Background(), b, mutator.RewriteSyncPaths())
		assert.NoError(t, err)
	})
}

@@ -30,13 +30,13 @@ func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error {
 	}

 	// Get specified target
-	target, ok := b.Config.Targets[m.name]
+	_, ok := b.Config.Targets[m.name]
 	if !ok {
 		return fmt.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", "))
 	}

 	// Merge specified target into root configuration structure.
-	err := b.Config.MergeTargetOverrides(target)
+	err := b.Config.MergeTargetOverrides(m.name)
 	if err != nil {
 		return err
 	}
@@ -9,8 +9,8 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
-	"github.com/databricks/cli/bundle/config/paths"
 	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/cli/bundle/internal/bundletest"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/databricks/databricks-sdk-go/service/pipelines"

@@ -44,10 +44,6 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							GitSource: &jobs.GitSource{
 								GitBranch: "somebranch",

@@ -80,6 +76,8 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	require.NoError(t, err)

@@ -116,9 +114,6 @@ func TestTranslatePaths(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -171,9 +166,6 @@ func TestTranslatePaths(t *testing.T) {
 				},
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -207,6 +199,8 @@ func TestTranslatePaths(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	require.NoError(t, err)

@@ -287,9 +281,6 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "job/resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -323,10 +314,6 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
 				},
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "pipeline/resource.yml"),
-						},
-
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -342,6 +329,9 @@ func TestTranslatePathsInSubdirectories(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, "resources.jobs", filepath.Join(dir, "job/resource.yml"))
+	bundletest.SetLocation(b, "resources.pipelines", filepath.Join(dir, "pipeline/resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	require.NoError(t, err)

@@ -385,9 +375,6 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "../resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -403,6 +390,8 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "../resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.ErrorContains(t, err, "is not contained in bundle root")
 }

@@ -416,9 +405,6 @@ func TestJobNotebookDoesNotExistError(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "fake.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -434,6 +420,8 @@ func TestJobNotebookDoesNotExistError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.EqualError(t, err, "notebook ./doesnt_exist.py not found")
 }

@@ -447,9 +435,6 @@ func TestJobFileDoesNotExistError(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "fake.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -465,6 +450,8 @@ func TestJobFileDoesNotExistError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.EqualError(t, err, "file ./doesnt_exist.py not found")
 }

@@ -478,9 +465,6 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) {
 			Resources: config.Resources{
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "fake.yml"),
-						},
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -496,6 +480,8 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.EqualError(t, err, "notebook ./doesnt_exist.py not found")
 }

@@ -509,9 +495,6 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
 			Resources: config.Resources{
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "fake.yml"),
-						},
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -527,6 +510,8 @@ func TestPipelineFileDoesNotExistError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "fake.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.EqualError(t, err, "file ./doesnt_exist.py not found")
 }

@@ -544,9 +529,6 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -562,6 +544,8 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.ErrorContains(t, err, `expected a file for "tasks.spark_python_task.python_file" but got a notebook`)
 }

@@ -579,9 +563,6 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"job": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						JobSettings: &jobs.JobSettings{
 							Tasks: []jobs.Task{
 								{

@@ -597,6 +578,8 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.ErrorContains(t, err, `expected a notebook for "tasks.notebook_task.notebook_path" but got a file`)
 }

@@ -614,9 +597,6 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
 			Resources: config.Resources{
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -632,6 +612,8 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.ErrorContains(t, err, `expected a notebook for "libraries.notebook.path" but got a file`)
 }

@@ -649,9 +631,6 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
 			Resources: config.Resources{
 				Pipelines: map[string]*resources.Pipeline{
 					"pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: filepath.Join(dir, "resource.yml"),
-						},
 						PipelineSpec: &pipelines.PipelineSpec{
 							Libraries: []pipelines.PipelineLibrary{
 								{

@@ -667,6 +646,8 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) {
 		},
 	}

+	bundletest.SetLocation(b, ".", filepath.Join(dir, "resource.yml"))
+
 	err := bundle.Apply(context.Background(), b, mutator.TranslatePaths())
 	assert.ErrorContains(t, err, `expected a file for "libraries.file.path" but got a notebook`)
 }
@@ -3,12 +3,25 @@ package paths
 import (
 	"fmt"
 	"path/filepath"
+
+	"github.com/databricks/cli/libs/dyn"
 )

 type Paths struct {
 	// Absolute path on the local file system to the configuration file that holds
 	// the definition of this resource.
 	ConfigFilePath string `json:"-" bundle:"readonly"`
+
+	// DynamicValue stores the [dyn.Value] of the containing struct.
+	// This assumes that this struct is always embedded.
+	DynamicValue dyn.Value `json:"-"`
+}
+
+func (p *Paths) ConfigureConfigFilePath() {
+	if !p.DynamicValue.IsValid() {
+		panic("DynamicValue not set")
+	}
+	p.ConfigFilePath = p.DynamicValue.Location().File
 }

 func (p *Paths) ConfigFileDirectory() (string, error) {
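A small sketch of the contract introduced here: a resource's config file path is now derived from the location metadata of its dyn.Value. The Location literal below is a hypothetical stand-in for a value produced by the YAML loader.

package main

import (
	"fmt"

	"github.com/databricks/cli/bundle/config/paths"
	"github.com/databricks/cli/libs/dyn"
)

func main() {
	var p paths.Paths
	// Hypothetical value whose location points at the file it was loaded from.
	p.DynamicValue = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{File: "/bundle/resource.yml"})
	p.ConfigureConfigFilePath()
	fmt.Println(p.ConfigFilePath) // /bundle/resource.yml
}
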
@@ -126,51 +126,30 @@ func (r *Resources) VerifyUniqueResourceIdentifiers() (*UniqueResourceIdTracker,
 	return tracker, nil
 }

-// SetConfigFilePath sets the specified path for all resources contained in this instance.
+// ConfigureConfigFilePath sets the config file path for all resources contained in this instance.
 // This property is used to correctly resolve paths relative to the path
 // of the configuration file they were defined in.
-func (r *Resources) SetConfigFilePath(path string) {
+func (r *Resources) ConfigureConfigFilePath() {
 	for _, e := range r.Jobs {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 	for _, e := range r.Pipelines {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 	for _, e := range r.Models {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 	for _, e := range r.Experiments {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 	for _, e := range r.ModelServingEndpoints {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 	for _, e := range r.RegisteredModels {
-		e.ConfigFilePath = path
+		e.ConfigureConfigFilePath()
 	}
 }

-// Merge iterates over all resources and merges chunks of the
-// resource configuration that can be merged. For example, for
-// jobs, this merges job cluster definitions and tasks that
-// use the same `job_cluster_key`, or `task_key`, respectively.
-func (r *Resources) Merge() error {
-	for _, job := range r.Jobs {
-		if err := job.MergeJobClusters(); err != nil {
-			return err
-		}
-		if err := job.MergeTasks(); err != nil {
-			return err
-		}
-	}
-	for _, pipeline := range r.Pipelines {
-		if err := pipeline.MergeClusters(); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
 type ConfigResource interface {
 	Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error)
 	TerraformResourceName() string
@@ -9,7 +9,6 @@ import (
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/marshal"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
-	"github.com/imdario/mergo"
 )

 type Job struct {

@@ -30,72 +29,6 @@ func (s Job) MarshalJSON() ([]byte, error) {
 	return marshal.Marshal(s)
 }

-// MergeJobClusters merges job clusters with the same key.
-// The job clusters field is a slice, and as such, overrides are appended to it.
-// We can identify a job cluster by its key, however, so we can use this key
-// to figure out which definitions are actually overrides and merge them.
-func (j *Job) MergeJobClusters() error {
-	keys := make(map[string]*jobs.JobCluster)
-	output := make([]jobs.JobCluster, 0, len(j.JobClusters))
-
-	// Target overrides are always appended, so we can iterate in natural order to
-	// first find the base definition, and merge instances we encounter later.
-	for i := range j.JobClusters {
-		key := j.JobClusters[i].JobClusterKey
-
-		// Register job cluster with key if not yet seen before.
-		ref, ok := keys[key]
-		if !ok {
-			output = append(output, j.JobClusters[i])
-			keys[key] = &output[len(output)-1]
-			continue
-		}
-
-		// Merge this instance into the reference.
-		err := mergo.Merge(ref, &j.JobClusters[i], mergo.WithOverride, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-	}
-
-	// Overwrite resulting slice.
-	j.JobClusters = output
-	return nil
-}
-
-// MergeTasks merges tasks with the same key.
-// The tasks field is a slice, and as such, overrides are appended to it.
-// We can identify a task by its task key, however, so we can use this key
-// to figure out which definitions are actually overrides and merge them.
-func (j *Job) MergeTasks() error {
-	keys := make(map[string]*jobs.Task)
-	tasks := make([]jobs.Task, 0, len(j.Tasks))
-
-	// Target overrides are always appended, so we can iterate in natural order to
-	// first find the base definition, and merge instances we encounter later.
-	for i := range j.Tasks {
-		key := j.Tasks[i].TaskKey
-
-		// Register the task with key if not yet seen before.
-		ref, ok := keys[key]
-		if !ok {
-			tasks = append(tasks, j.Tasks[i])
-			keys[key] = &tasks[len(tasks)-1]
-			continue
-		}
-
-		// Merge this instance into the reference.
-		err := mergo.Merge(ref, &j.Tasks[i], mergo.WithOverride, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-	}
-
-	// Overwrite resulting slice.
-	j.Tasks = tasks
-	return nil
-}
-
 func (j *Job) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
 	jobId, err := strconv.Atoi(id)
 	if err != nil {
@@ -1,116 +0,0 @@
package resources

import (
	"testing"

	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestJobMergeJobClusters(t *testing.T) {
	j := &Job{
		JobSettings: &jobs.JobSettings{
			JobClusters: []jobs.JobCluster{
				{
					JobClusterKey: "foo",
					NewCluster: &compute.ClusterSpec{
						SparkVersion: "13.3.x-scala2.12",
						NodeTypeId:   "i3.xlarge",
						NumWorkers:   2,
					},
				},
				{
					JobClusterKey: "bar",
					NewCluster: &compute.ClusterSpec{
						SparkVersion: "10.4.x-scala2.12",
					},
				},
				{
					JobClusterKey: "foo",
					NewCluster: &compute.ClusterSpec{
						NodeTypeId: "i3.2xlarge",
						NumWorkers: 4,
					},
				},
			},
		},
	}

	err := j.MergeJobClusters()
	require.NoError(t, err)

	assert.Len(t, j.JobClusters, 2)
	assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey)
	assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey)

	// This job cluster was merged with a subsequent one.
	jc0 := j.JobClusters[0].NewCluster
	assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion)
	assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId)
	assert.Equal(t, 4, jc0.NumWorkers)

	// This job cluster was left untouched.
	jc1 := j.JobClusters[1].NewCluster
	assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion)
}

func TestJobMergeTasks(t *testing.T) {
	j := &Job{
		JobSettings: &jobs.JobSettings{
			Tasks: []jobs.Task{
				{
					TaskKey: "foo",
					NewCluster: &compute.ClusterSpec{
						SparkVersion: "13.3.x-scala2.12",
						NodeTypeId:   "i3.xlarge",
						NumWorkers:   2,
					},
					Libraries: []compute.Library{
						{Whl: "package1"},
					},
				},
				{
					TaskKey: "bar",
					NewCluster: &compute.ClusterSpec{
						SparkVersion: "10.4.x-scala2.12",
					},
				},
				{
					TaskKey: "foo",
					NewCluster: &compute.ClusterSpec{
						NodeTypeId: "i3.2xlarge",
						NumWorkers: 4,
					},
					Libraries: []compute.Library{
						{Pypi: &compute.PythonPyPiLibrary{
							Package: "package2",
						}},
					},
				},
			},
		},
	}

	err := j.MergeTasks()
	require.NoError(t, err)

	assert.Len(t, j.Tasks, 2)
	assert.Equal(t, "foo", j.Tasks[0].TaskKey)
	assert.Equal(t, "bar", j.Tasks[1].TaskKey)

	// This task was merged with a subsequent one.
	task0 := j.Tasks[0]
	cluster := task0.NewCluster
	assert.Equal(t, "13.3.x-scala2.12", cluster.SparkVersion)
	assert.Equal(t, "i3.2xlarge", cluster.NodeTypeId)
	assert.Equal(t, 4, cluster.NumWorkers)
	assert.Len(t, task0.Libraries, 2)
	assert.Equal(t, task0.Libraries[0].Whl, "package1")
	assert.Equal(t, task0.Libraries[1].Pypi.Package, "package2")

	// This task was left untouched.
	task1 := j.Tasks[1].NewCluster
	assert.Equal(t, "10.4.x-scala2.12", task1.SparkVersion)
}
@@ -2,14 +2,12 @@ package resources

 import (
 	"context"
-	"strings"

 	"github.com/databricks/cli/bundle/config/paths"
 	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/databricks/databricks-sdk-go/marshal"
 	"github.com/databricks/databricks-sdk-go/service/pipelines"
-	"github.com/imdario/mergo"
 )

 type Pipeline struct {

@@ -30,53 +28,6 @@ func (s Pipeline) MarshalJSON() ([]byte, error) {
 	return marshal.Marshal(s)
 }

-// MergeClusters merges cluster definitions with same label.
-// The clusters field is a slice, and as such, overrides are appended to it.
-// We can identify a cluster by its label, however, so we can use this label
-// to figure out which definitions are actually overrides and merge them.
-//
-// Note: the cluster label is optional and defaults to 'default'.
-// We therefore ALSO merge all clusters without a label.
-func (p *Pipeline) MergeClusters() error {
-	clusters := make(map[string]*pipelines.PipelineCluster)
-	output := make([]pipelines.PipelineCluster, 0, len(p.Clusters))
-
-	// Normalize cluster labels.
-	// If empty, this defaults to "default".
-	// To make matching case insensitive, labels are lowercased.
-	for i := range p.Clusters {
-		label := p.Clusters[i].Label
-		if label == "" {
-			label = "default"
-		}
-		p.Clusters[i].Label = strings.ToLower(label)
-	}
-
-	// Target overrides are always appended, so we can iterate in natural order to
-	// first find the base definition, and merge instances we encounter later.
-	for i := range p.Clusters {
-		label := p.Clusters[i].Label
-
-		// Register pipeline cluster with label if not yet seen before.
-		ref, ok := clusters[label]
-		if !ok {
-			output = append(output, p.Clusters[i])
-			clusters[label] = &output[len(output)-1]
-			continue
-		}
-
-		// Merge this instance into the reference.
-		err := mergo.Merge(ref, &p.Clusters[i], mergo.WithOverride, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-	}
-
-	// Overwrite resulting slice.
-	p.Clusters = output
-	return nil
-}
-
 func (p *Pipeline) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
 	_, err := w.Pipelines.Get(ctx, pipelines.GetPipelineRequest{
 		PipelineId: id,
@@ -1,76 +0,0 @@
package resources

import (
	"strings"
	"testing"

	"github.com/databricks/databricks-sdk-go/service/pipelines"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestPipelineMergeClusters(t *testing.T) {
	p := &Pipeline{
		PipelineSpec: &pipelines.PipelineSpec{
			Clusters: []pipelines.PipelineCluster{
				{
					NodeTypeId: "i3.xlarge",
					NumWorkers: 2,
					PolicyId:   "1234",
				},
				{
					Label:      "maintenance",
					NodeTypeId: "i3.2xlarge",
				},
				{
					NodeTypeId: "i3.2xlarge",
					NumWorkers: 4,
				},
			},
		},
	}

	err := p.MergeClusters()
	require.NoError(t, err)

	assert.Len(t, p.Clusters, 2)
	assert.Equal(t, "default", p.Clusters[0].Label)
	assert.Equal(t, "maintenance", p.Clusters[1].Label)

	// The default cluster was merged with a subsequent one.
	pc0 := p.Clusters[0]
	assert.Equal(t, "i3.2xlarge", pc0.NodeTypeId)
	assert.Equal(t, 4, pc0.NumWorkers)
	assert.Equal(t, "1234", pc0.PolicyId)

	// The maintenance cluster was left untouched.
	pc1 := p.Clusters[1]
	assert.Equal(t, "i3.2xlarge", pc1.NodeTypeId)
}

func TestPipelineMergeClustersCaseInsensitive(t *testing.T) {
	p := &Pipeline{
		PipelineSpec: &pipelines.PipelineSpec{
			Clusters: []pipelines.PipelineCluster{
				{
					Label:      "default",
					NumWorkers: 2,
				},
				{
					Label:      "DEFAULT",
					NumWorkers: 4,
				},
			},
		},
	}

	err := p.MergeClusters()
	require.NoError(t, err)

	assert.Len(t, p.Clusters, 1)

	// The default cluster was merged with a subsequent one.
	pc0 := p.Clusters[0]
	assert.Equal(t, "default", strings.ToLower(pc0.Label))
	assert.Equal(t, 4, pc0.NumWorkers)
}
@@ -1,6 +1,8 @@
 package config

 import (
+	"bytes"
+	"context"
 	"fmt"
 	"os"
 	"path/filepath"

@@ -8,12 +10,20 @@ import (
 	"github.com/databricks/cli/bundle/config/resources"
 	"github.com/databricks/cli/bundle/config/variable"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/convert"
+	"github.com/databricks/cli/libs/dyn/merge"
+	"github.com/databricks/cli/libs/dyn/yamlloader"
+	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
-	"github.com/ghodss/yaml"
-	"github.com/imdario/mergo"
 )

 type Root struct {
+	value dyn.Value
+	diags diag.Diagnostics
+	depth int
+
 	// Path contains the directory path to the root of the bundle.
 	// It is set when loading `databricks.yml`.
 	Path string `json:"-" bundle:"readonly"`
@@ -70,48 +80,169 @@ func Load(path string) (*Root, error) {
 		return nil, err
 	}

-	var r Root
-	err = yaml.Unmarshal(raw, &r)
+	r := Root{
+		Path: filepath.Dir(path),
+	}
+
+	// Load configuration tree from YAML.
+	v, err := yamlloader.LoadYAML(path, bytes.NewBuffer(raw))
 	if err != nil {
 		return nil, fmt.Errorf("failed to load %s: %w", path, err)
 	}

-	if r.Environments != nil && r.Targets != nil {
-		return nil, fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path)
-	}
-
-	if r.Environments != nil {
-		//TODO: add a command line notice that this is a deprecated option.
-		r.Targets = r.Environments
-	}
-
-	r.Path = filepath.Dir(path)
-	r.SetConfigFilePath(path)
+	// Rewrite configuration tree where necessary.
+	v, err = rewriteShorthands(v)
+	if err != nil {
+		return nil, fmt.Errorf("failed to rewrite %s: %w", path, err)
+	}
+
+	// Normalize dynamic configuration tree according to configuration type.
+	v, diags := convert.Normalize(r, v)
+
+	// Keep track of diagnostics (warnings and errors in the schema).
+	// We delay acting on diagnostics until we have loaded all
+	// configuration files and merged them together.
+	r.diags = diags
+
+	// Convert normalized configuration tree to typed configuration.
+	err = r.updateWithDynamicValue(v)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load %s: %w", path, err)
+	}

 	_, err = r.Resources.VerifyUniqueResourceIdentifiers()
 	return &r, err
 }

-// SetConfigFilePath configures the path that its configuration
-// was loaded from in configuration leafs that require it.
-func (r *Root) SetConfigFilePath(path string) {
-	r.Resources.SetConfigFilePath(path)
-	if r.Artifacts != nil {
-		r.Artifacts.SetConfigFilePath(path)
-	}
-	if r.Targets != nil {
-		for _, env := range r.Targets {
-			if env == nil {
-				continue
-			}
-			if env.Resources != nil {
-				env.Resources.SetConfigFilePath(path)
-			}
-			if env.Artifacts != nil {
-				env.Artifacts.SetConfigFilePath(path)
-			}
-		}
-	}
-}
+func (r *Root) initializeDynamicValue() error {
+	// Many test cases initialize a config as a Go struct literal.
+	// The value will be invalid and we need to populate it from the typed configuration.
+	if r.value.IsValid() {
+		return nil
+	}
+
+	nv, err := convert.FromTyped(r, dyn.NilValue)
+	if err != nil {
+		return err
+	}
+
+	r.value = nv
+	return nil
+}
+
+func (r *Root) updateWithDynamicValue(nv dyn.Value) error {
+	// Hack: restore state; it may be cleared by [ToTyped] if
+	// the configuration equals nil (happens in tests).
+	diags := r.diags
+	depth := r.depth
+	path := r.Path
+
+	defer func() {
+		r.diags = diags
+		r.depth = depth
+		r.Path = path
+	}()
+
+	// Convert normalized configuration tree to typed configuration.
+	err := convert.ToTyped(r, nv)
+	if err != nil {
+		return err
+	}
+
+	// Assign the normalized configuration tree.
+	r.value = nv
+
+	// Assign config file paths after converting to typed configuration.
+	r.ConfigureConfigFilePath()
+	return nil
+}
+
+func (r *Root) Mutate(fn func(dyn.Value) (dyn.Value, error)) error {
+	err := r.initializeDynamicValue()
+	if err != nil {
+		return err
+	}
+	nv, err := fn(r.value)
+	if err != nil {
+		return err
+	}
+	err = r.updateWithDynamicValue(nv)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func (r *Root) MarkMutatorEntry(ctx context.Context) error {
+	err := r.initializeDynamicValue()
+	if err != nil {
+		return err
+	}
+
+	r.depth++
+
+	// If we are entering a mutator at depth 1, we need to convert
+	// the dynamic configuration tree to typed configuration.
+	if r.depth == 1 {
+		// Always run ToTyped upon entering a mutator.
+		// Convert normalized configuration tree to typed configuration.
+		err := r.updateWithDynamicValue(r.value)
+		if err != nil {
+			log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err)
+			return err
+		}
+	} else {
+		nv, err := convert.FromTyped(r, r.value)
+		if err != nil {
+			log.Warnf(ctx, "unable to convert typed configuration to dynamic configuration: %v", err)
+			return err
+		}
+
+		// Re-run ToTyped to ensure that no state is piggybacked.
+		err = r.updateWithDynamicValue(nv)
+		if err != nil {
+			log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err)
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (r *Root) MarkMutatorExit(ctx context.Context) error {
+	r.depth--
+
+	// If we are exiting a mutator at depth 0, we need to convert
+	// the typed configuration to a dynamic configuration tree.
+	if r.depth == 0 {
+		nv, err := convert.FromTyped(r, r.value)
+		if err != nil {
+			log.Warnf(ctx, "unable to convert typed configuration to dynamic configuration: %v", err)
+			return err
+		}
+
+		// Re-run ToTyped to ensure that no state is piggybacked.
+		err = r.updateWithDynamicValue(nv)
+		if err != nil {
+			log.Warnf(ctx, "unable to convert dynamic configuration to typed configuration: %v", err)
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (r *Root) Diagnostics() diag.Diagnostics {
+	return r.diags
+}
+
+// ConfigureConfigFilePath configures the path that its configuration
+// was loaded from in configuration leafs that require it.
+func (r *Root) ConfigureConfigFilePath() {
+	r.Resources.ConfigureConfigFilePath()
+	if r.Artifacts != nil {
+		r.Artifacts.ConfigureConfigFilePath()
+	}
+}
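A minimal sketch of the Mutate contract defined above, assuming (as the comment in initializeDynamicValue suggests) that a typed Root can be converted to a dynamic value on demand: the callback transforms the dynamic tree, and the typed struct is re-synced afterwards.

// Sketch; would live in package config alongside Root.
func exampleMutate(r *Root) error {
	return r.Mutate(func(v dyn.Value) (dyn.Value, error) {
		// Any dyn-level rewrite goes here; returning v unchanged is a no-op.
		return v, nil
	})
}
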
@ -139,126 +270,189 @@ func (r *Root) InitializeVariables(vars []string) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *Root) Merge(other *Root) error {
|
func (r *Root) Merge(other *Root) error {
|
||||||
err := r.Sync.Merge(r, other)
|
// Merge diagnostics.
|
||||||
if err != nil {
|
r.diags = append(r.diags, other.diags...)
|
||||||
return err
|
|
||||||
}
|
|
||||||
other.Sync = Sync{}
|
|
||||||
|
|
||||||
// TODO: when hooking into merge semantics, disallow setting path on the target instance.
|
|
||||||
other.Path = ""
|
|
||||||
|
|
||||||
// Check for safe merge, protecting against duplicate resource identifiers
|
// Check for safe merge, protecting against duplicate resource identifiers
|
||||||
err = r.Resources.VerifySafeMerge(&other.Resources)
|
err := r.Resources.VerifySafeMerge(&other.Resources)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO: define and test semantics for merging.
|
// Merge dynamic configuration values.
|
||||||
return mergo.Merge(r, other, mergo.WithOverride)
|
return r.Mutate(func(root dyn.Value) (dyn.Value, error) {
|
||||||
|
return merge.Merge(root, other.value)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
func (r *Root) MergeTargetOverrides(target *Target) error {
|
func mergeField(rv, ov dyn.Value, name string) (dyn.Value, error) {
|
||||||
|
path := dyn.NewPath(dyn.Key(name))
|
||||||
|
reference, _ := dyn.GetByPath(rv, path)
|
||||||
|
override, _ := dyn.GetByPath(ov, path)
|
||||||
|
|
||||||
|
// Merge the override into the reference.
|
||||||
|
var out dyn.Value
|
||||||
var err error
|
var err error
|
||||||
|
if reference.IsValid() && override.IsValid() {
|
||||||
// Target may be nil if it's empty.
|
out, err = merge.Merge(reference, override)
|
||||||
if target == nil {
|
if err != nil {
|
||||||
return nil
|
return dyn.InvalidValue, err
|
||||||
|
}
|
||||||
|
} else if reference.IsValid() {
|
||||||
|
out = reference
|
||||||
|
} else if override.IsValid() {
|
||||||
|
out = override
|
||||||
|
} else {
|
||||||
|
return rv, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
if target.Bundle != nil {
|
return dyn.SetByPath(rv, path, out)
|
||||||
err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride)
|
}
|
||||||
|
|
||||||
|
func (r *Root) MergeTargetOverrides(name string) error {
|
||||||
|
root := r.value
|
||||||
|
target, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("targets"), dyn.Key(name)))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Confirm validity of variable overrides.
|
||||||
|
err = validateVariableOverrides(root, target)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge fields that can be merged 1:1.
|
||||||
|
for _, f := range []string{
|
||||||
|
"bundle",
|
||||||
|
"workspace",
|
||||||
|
"artifacts",
|
||||||
|
"resources",
|
||||||
|
"sync",
|
||||||
|
"permissions",
|
||||||
|
"variables",
|
||||||
|
} {
|
||||||
|
if root, err = mergeField(root, target, f); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge `run_as`. This field must be overwritten if set, not merged.
|
||||||
|
if v := target.Get("run_as"); v != dyn.NilValue {
|
||||||
|
root, err = dyn.Set(root, "run_as", v)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if target.Workspace != nil {
|
// Below, we're setting fields on the bundle key, so make sure it exists.
|
||||||
err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride)
|
if root.Get("bundle") == dyn.NilValue {
|
||||||
|
root, err = dyn.Set(root, "bundle", dyn.NewValue(map[string]dyn.Value{}, dyn.Location{}))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if target.Artifacts != nil {
|
// Merge `mode`. This field must be overwritten if set, not merged.
|
||||||
err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice)
|
if v := target.Get("mode"); v != dyn.NilValue {
|
||||||
|
root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("mode")), v)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-	if target.Resources != nil {
-		err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-
-		err = r.Resources.Merge()
-		if err != nil {
-			return err
-		}
-	}
-
-	if target.Variables != nil {
-		for k, v := range target.Variables {
-			rootVariable, ok := r.Variables[k]
-			if !ok {
-				return fmt.Errorf("variable %s is not defined but is assigned a value", k)
-			}
-
-			if sv, ok := v.(string); ok {
-				// we allow overrides of the default value for a variable
-				defaultVal := sv
-				rootVariable.Default = &defaultVal
-			} else if vv, ok := v.(map[string]any); ok {
-				// we also allow overrides of the lookup value for a variable
-				lookup, ok := vv["lookup"]
-				if !ok {
-					return fmt.Errorf("variable %s is incorrectly defined lookup override, no 'lookup' key defined", k)
-				}
-				rootVariable.Lookup = variable.LookupFromMap(lookup.(map[string]any))
-			} else {
-				return fmt.Errorf("variable %s is incorrectly defined in target override", k)
-			}
-		}
-	}
-
-	if target.RunAs != nil {
-		r.RunAs = target.RunAs
-	}
-
-	if target.Mode != "" {
-		r.Bundle.Mode = target.Mode
-	}
-
-	if target.ComputeID != "" {
-		r.Bundle.ComputeID = target.ComputeID
-	}
-
-	git := &r.Bundle.Git
-	if target.Git.Branch != "" {
-		git.Branch = target.Git.Branch
-		git.Inferred = false
-	}
-	if target.Git.Commit != "" {
-		git.Commit = target.Git.Commit
-	}
-	if target.Git.OriginURL != "" {
-		git.OriginURL = target.Git.OriginURL
-	}
-
-	if target.Sync != nil {
-		err = mergo.Merge(&r.Sync, target.Sync, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-	}
-
-	if target.Permissions != nil {
-		err = mergo.Merge(&r.Permissions, target.Permissions, mergo.WithAppendSlice)
-		if err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
+	// Merge `compute_id`. This field must be overwritten if set, not merged.
+	if v := target.Get("compute_id"); v != dyn.NilValue {
+		root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id")), v)
+		if err != nil {
+			return err
+		}
+	}
+
+	// Merge `git`.
+	if v := target.Get("git"); v != dyn.NilValue {
+		ref, err := dyn.GetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git")))
+		if err != nil {
+			ref = dyn.NewValue(map[string]dyn.Value{}, dyn.Location{})
+		}
+
+		// Merge the override into the reference.
+		out, err := merge.Merge(ref, v)
+		if err != nil {
+			return err
+		}
+
+		// If the branch was overridden, we need to clear the inferred flag.
+		if branch := v.Get("branch"); branch != dyn.NilValue {
+			out, err = dyn.SetByPath(out, dyn.NewPath(dyn.Key("inferred")), dyn.NewValue(false, dyn.Location{}))
+			if err != nil {
+				return err
+			}
+		}
+
+		// Set the merged value.
+		root, err = dyn.SetByPath(root, dyn.NewPath(dyn.Key("bundle"), dyn.Key("git")), out)
+		if err != nil {
+			return err
+		}
+	}
+
+	// Convert normalized configuration tree to typed configuration.
+	return r.updateWithDynamicValue(root)
+}
+
+// rewriteShorthands performs lightweight rewriting of the configuration
+// tree where we allow users to write a shorthand and must rewrite to the full form.
+func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
+	if v.Kind() != dyn.KindMap {
+		return v, nil
+	}
+
+	// For each target, rewrite the variables block.
+	return dyn.Map(v, "targets", dyn.Foreach(func(target dyn.Value) (dyn.Value, error) {
+		// Confirm it has a variables block.
+		if target.Get("variables") == dyn.NilValue {
+			return target, nil
+		}
+
+		// For each variable, normalize its contents if it is a single string.
+		return dyn.Map(target, "variables", dyn.Foreach(func(variable dyn.Value) (dyn.Value, error) {
+			if variable.Kind() != dyn.KindString {
+				return variable, nil
+			}
+
+			// Rewrite the variable to a map with a single key called "default".
+			// This conforms to the variable type.
+			return dyn.NewValue(map[string]dyn.Value{
+				"default": variable,
+			}, variable.Location()), nil
+		}))
+	}))
+}
+
+// validateVariableOverrides checks that all variables specified
+// in the target override are also defined in the root.
+func validateVariableOverrides(root, target dyn.Value) (err error) {
+	var rv map[string]variable.Variable
+	var tv map[string]variable.Variable
+
+	// Collect variables from the root.
+	err = convert.ToTyped(&rv, root.Get("variables"))
+	if err != nil {
+		return fmt.Errorf("unable to collect variables from root: %w", err)
+	}
+
+	// Collect variables from the target.
+	err = convert.ToTyped(&tv, target.Get("variables"))
+	if err != nil {
+		return fmt.Errorf("unable to collect variables from target: %w", err)
+	}
+
+	// Check that all variables in the target exist in the root.
+	for k := range tv {
+		if _, ok := rv[k]; !ok {
+			return fmt.Errorf("variable %s is not defined but is assigned a value", k)
+		}
+	}
+
+	return nil
+}
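
As an aside, the new merge logic above is built from a small set of `dyn` primitives. The following is a minimal sketch of the read/overwrite-by-path pattern, using only calls that appear in the hunk above; the literal values are illustrative, not part of this change:

    package main

    import (
    	"fmt"

    	"github.com/databricks/cli/libs/dyn"
    )

    func main() {
    	// Build a small tree: {bundle: {compute_id: "original"}}.
    	root := dyn.NewValue(map[string]dyn.Value{
    		"bundle": dyn.NewValue(map[string]dyn.Value{
    			"compute_id": dyn.V("original"),
    		}, dyn.Location{}),
    	}, dyn.Location{})

    	path := dyn.NewPath(dyn.Key("bundle"), dyn.Key("compute_id"))

    	// Read the current value by path.
    	v, err := dyn.GetByPath(root, path)
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(v)

    	// Overwrite it. SetByPath returns an updated root, which is why
    	// the code above reassigns `root` after every write.
    	root, err = dyn.SetByPath(root, path, dyn.V("override"))
    	if err != nil {
    		panic(err)
    	}
    	_ = root
    }
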
@@ -30,51 +30,6 @@ func TestRootLoad(t *testing.T) {
 	assert.Equal(t, "basic", root.Bundle.Name)
 }

-func TestRootMergeStruct(t *testing.T) {
-	root := &Root{
-		Path: "path",
-		Workspace: Workspace{
-			Host:    "foo",
-			Profile: "profile",
-		},
-	}
-	other := &Root{
-		Path: "path",
-		Workspace: Workspace{
-			Host: "bar",
-		},
-	}
-	assert.NoError(t, root.Merge(other))
-	assert.Equal(t, "bar", root.Workspace.Host)
-	assert.Equal(t, "profile", root.Workspace.Profile)
-}
-
-func TestRootMergeMap(t *testing.T) {
-	root := &Root{
-		Path: "path",
-		Targets: map[string]*Target{
-			"development": {
-				Workspace: &Workspace{
-					Host:    "foo",
-					Profile: "profile",
-				},
-			},
-		},
-	}
-	other := &Root{
-		Path: "path",
-		Targets: map[string]*Target{
-			"development": {
-				Workspace: &Workspace{
-					Host: "bar",
-				},
-			},
-		},
-	}
-	assert.NoError(t, root.Merge(other))
-	assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Targets["development"].Workspace)
-}
-
 func TestDuplicateIdOnLoadReturnsError(t *testing.T) {
 	_, err := Load("./testdata/duplicate_resource_names_in_root/databricks.yml")
 	assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)")
@@ -154,8 +109,13 @@ func TestInitializeVariablesUndefinedVariables(t *testing.T) {
 func TestRootMergeTargetOverridesWithMode(t *testing.T) {
 	root := &Root{
 		Bundle: Bundle{},
+		Targets: map[string]*Target{
+			"development": {
+				Mode: Development,
+			},
+		},
 	}
-	env := &Target{Mode: Development}
-	require.NoError(t, root.MergeTargetOverrides(env))
+	root.initializeDynamicValue()
+	require.NoError(t, root.MergeTargetOverrides("development"))
 	assert.Equal(t, Development, root.Bundle.Mode)
 }
@@ -1,7 +1,5 @@
 package config

-import "path/filepath"
-
 type Sync struct {
 	// Include contains a list of globs evaluated relative to the bundle root path
 	// to explicitly include files that were excluded by the user's gitignore.
@@ -13,19 +11,3 @@ type Sync struct {
 	// 2) the `Include` field above.
 	Exclude []string `json:"exclude,omitempty"`
 }
-
-func (s *Sync) Merge(root *Root, other *Root) error {
-	path, err := filepath.Rel(root.Path, other.Path)
-	if err != nil {
-		return err
-	}
-	for _, include := range other.Sync.Include {
-		s.Include = append(s.Include, filepath.Join(path, include))
-	}
-
-	for _, exclude := range other.Sync.Exclude {
-		s.Exclude = append(s.Exclude, filepath.Join(path, exclude))
-	}
-
-	return nil
-}
@@ -2,6 +2,7 @@ package config

 import (
 	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/cli/bundle/config/variable"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 )

@@ -33,7 +34,7 @@ type Target struct {
 	// Override default values or lookup name for defined variables
 	// Does not permit defining new variables or redefining existing ones
 	// in the scope of an target
-	Variables map[string]any `json:"variables,omitempty"`
+	Variables map[string]*variable.Variable `json:"variables,omitempty"`

 	Git Git `json:"git,omitempty"`

@@ -4,8 +4,6 @@ import (
 	"fmt"
 )

-const VariableReferencePrefix = "var"
-
 // An input variable for the bundle config
 type Variable struct {
 	// A default value which then makes the variable optional
@@ -6,8 +6,8 @@ import (

 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/bundle/config/paths"
 	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/cli/bundle/internal/bundletest"
 	"github.com/databricks/cli/bundle/metadata"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/stretchr/testify/assert"
@@ -36,18 +36,12 @@ func TestComputeMetadataMutator(t *testing.T) {
 			Resources: config.Resources{
 				Jobs: map[string]*resources.Job{
 					"my-job-1": {
-						Paths: paths.Paths{
-							ConfigFilePath: "a/b/c",
-						},
 						ID: "1111",
 						JobSettings: &jobs.JobSettings{
 							Name: "My Job One",
 						},
 					},
 					"my-job-2": {
-						Paths: paths.Paths{
-							ConfigFilePath: "d/e/f",
-						},
 						ID: "2222",
 						JobSettings: &jobs.JobSettings{
 							Name: "My Job Two",
@@ -55,16 +49,16 @@ func TestComputeMetadataMutator(t *testing.T) {
 						},
 					},
 				},
 				Pipelines: map[string]*resources.Pipeline{
-					"my-pipeline": {
-						Paths: paths.Paths{
-							ConfigFilePath: "abc",
-						},
-					},
+					"my-pipeline": {},
 				},
 			},
 		},
 	}

+	bundletest.SetLocation(b, "resources.jobs.my-job-1", "a/b/c")
+	bundletest.SetLocation(b, "resources.jobs.my-job-2", "d/e/f")
+	bundletest.SetLocation(b, "resources.pipelines.my-pipeline", "abc")
+
 	expectedMetadata := metadata.Metadata{
 		Version: metadata.Version,
 		Config: metadata.Config{
@@ -1,44 +1,64 @@
 package terraform

 import (
+	"context"
 	"fmt"
-	"strings"

 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/interpolation"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/dynvar"
 )

-// Rewrite variable references to resources into Terraform compatible format.
-func interpolateTerraformResourceIdentifiers(path string, lookup map[string]string) (string, error) {
-	parts := strings.Split(path, interpolation.Delimiter)
-	if parts[0] == "resources" {
-		switch parts[1] {
-		case "pipelines":
-			path = strings.Join(append([]string{"databricks_pipeline"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		case "jobs":
-			path = strings.Join(append([]string{"databricks_job"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		case "models":
-			path = strings.Join(append([]string{"databricks_mlflow_model"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		case "experiments":
-			path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		case "model_serving_endpoints":
-			path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		case "registered_models":
-			path = strings.Join(append([]string{"databricks_registered_model"}, parts[2:]...), interpolation.Delimiter)
-			return fmt.Sprintf("${%s}", path), nil
-		default:
-			panic("TODO: " + parts[1])
-		}
-	}
-
-	return interpolation.DefaultLookup(path, lookup)
-}
+type interpolateMutator struct {
+}

 func Interpolate() bundle.Mutator {
-	return interpolation.Interpolate(interpolateTerraformResourceIdentifiers)
+	return &interpolateMutator{}
+}
+
+func (m *interpolateMutator) Name() string {
+	return "terraform.Interpolate"
+}
+
+func (m *interpolateMutator) Apply(ctx context.Context, b *bundle.Bundle) error {
+	return b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
+		prefix := dyn.MustPathFromString("resources")
+
+		// Resolve variable references in all values.
+		return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) {
+			// Expect paths of the form:
+			//   - resources.<resource_type>.<resource_name>.<field>...
+			if !path.HasPrefix(prefix) || len(path) < 4 {
+				return dyn.InvalidValue, dynvar.ErrSkipResolution
+			}
+
+			// Rewrite the bundle configuration path:
+			//
+			//   ${resources.pipelines.my_pipeline.id}
+			//
+			// into the Terraform-compatible resource identifier:
+			//
+			//   ${databricks_pipeline.my_pipeline.id}
+			//
+			switch path[1] {
+			case dyn.Key("pipelines"):
+				path = dyn.NewPath(dyn.Key("databricks_pipeline")).Append(path[2:]...)
+			case dyn.Key("jobs"):
+				path = dyn.NewPath(dyn.Key("databricks_job")).Append(path[2:]...)
+			case dyn.Key("models"):
+				path = dyn.NewPath(dyn.Key("databricks_mlflow_model")).Append(path[2:]...)
+			case dyn.Key("experiments"):
+				path = dyn.NewPath(dyn.Key("databricks_mlflow_experiment")).Append(path[2:]...)
+			case dyn.Key("model_serving_endpoints"):
+				path = dyn.NewPath(dyn.Key("databricks_model_serving")).Append(path[2:]...)
+			case dyn.Key("registered_models"):
+				path = dyn.NewPath(dyn.Key("databricks_registered_model")).Append(path[2:]...)
+			default:
+				// Trigger "key not found" for unknown resource types.
+				return dyn.GetByPath(root, path)
+			}
+
+			return dyn.V(fmt.Sprintf("${%s}", path.String())), nil
+		})
+	})
 }
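
For reference, the `dynvar.Resolve` contract the mutator above relies on: the callback receives the path inside each `${...}` reference and returns a replacement value, or `dynvar.ErrSkipResolution` to leave that reference untouched. A sketch, assuming only those semantics, that resolves references against the tree itself (this mirrors the mutator's fallback branch):

    package example

    import (
    	"github.com/databricks/cli/libs/dyn"
    	"github.com/databricks/cli/libs/dyn/dynvar"
    )

    // resolveAgainstRoot substitutes ${...} references with the values found
    // at the referenced paths; unresolvable references surface as an error.
    func resolveAgainstRoot(root dyn.Value) (dyn.Value, error) {
    	return dynvar.Resolve(root, func(path dyn.Path) (dyn.Value, error) {
    		return dyn.GetByPath(root, path)
    	})
    }
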
@@ -0,0 +1,92 @@
+package terraform
+
+import (
+	"context"
+	"testing"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/bundle/config"
+	"github.com/databricks/cli/bundle/config/resources"
+	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/databricks/databricks-sdk-go/service/ml"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestInterpolate(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Bundle: config.Bundle{
+				Name: "example",
+			},
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"my_job": {
+						JobSettings: &jobs.JobSettings{
+							Tags: map[string]string{
+								"other_pipeline":         "${resources.pipelines.other_pipeline.id}",
+								"other_job":              "${resources.jobs.other_job.id}",
+								"other_model":            "${resources.models.other_model.id}",
+								"other_experiment":       "${resources.experiments.other_experiment.id}",
+								"other_model_serving":    "${resources.model_serving_endpoints.other_model_serving.id}",
+								"other_registered_model": "${resources.registered_models.other_registered_model.id}",
+							},
+							Tasks: []jobs.Task{
+								{
+									TaskKey: "my_task",
+									NotebookTask: &jobs.NotebookTask{
+										BaseParameters: map[string]string{
+											"model_name": "${resources.models.my_model.name}",
+										},
+									},
+								},
+							},
+						},
+					},
+				},
+				Models: map[string]*resources.MlflowModel{
+					"my_model": {
+						Model: &ml.Model{
+							Name: "my_model",
+						},
+					},
+				},
+			},
+		},
+	}
+
+	err := bundle.Apply(context.Background(), b, Interpolate())
+	require.NoError(t, err)
+
+	j := b.Config.Resources.Jobs["my_job"]
+	assert.Equal(t, "${databricks_pipeline.other_pipeline.id}", j.Tags["other_pipeline"])
+	assert.Equal(t, "${databricks_job.other_job.id}", j.Tags["other_job"])
+	assert.Equal(t, "${databricks_mlflow_model.other_model.id}", j.Tags["other_model"])
+	assert.Equal(t, "${databricks_mlflow_experiment.other_experiment.id}", j.Tags["other_experiment"])
+	assert.Equal(t, "${databricks_model_serving.other_model_serving.id}", j.Tags["other_model_serving"])
+	assert.Equal(t, "${databricks_registered_model.other_registered_model.id}", j.Tags["other_registered_model"])
+
+	m := b.Config.Resources.Models["my_model"]
+	assert.Equal(t, "my_model", m.Model.Name)
+}
+
+func TestInterpolateUnknownResourceType(t *testing.T) {
+	b := &bundle.Bundle{
+		Config: config.Root{
+			Resources: config.Resources{
+				Jobs: map[string]*resources.Job{
+					"my_job": {
+						JobSettings: &jobs.JobSettings{
+							Tags: map[string]string{
+								"other_unknown": "${resources.unknown.other_unknown.id}",
+							},
+						},
+					},
+				},
+			},
+		},
+	}
+
+	err := bundle.Apply(context.Background(), b, Interpolate())
+	assert.Contains(t, err.Error(), `reference does not exist: ${resources.unknown.other_unknown.id}`)
+}
@@ -0,0 +1,34 @@
+package bundletest
+
+import (
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/libs/dyn"
+)
+
+// SetLocation sets the location of all values in the bundle to the given path.
+// This is useful for testing where we need to associate configuration
+// with the path it is loaded from.
+func SetLocation(b *bundle.Bundle, prefix string, filePath string) {
+	start := dyn.MustPathFromString(prefix)
+	b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
+		return dyn.Walk(root, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+			// If the path has the given prefix, set the location.
+			if p.HasPrefix(start) {
+				return v.WithLocation(dyn.Location{
+					File: filePath,
+				}), nil
+			}
+
+			// The path is not nested under the given prefix.
+			// If the path is a prefix of the prefix, keep traversing and return the node verbatim.
+			if start.HasPrefix(p) {
+				return v, nil
+			}
+
+			// Return verbatim, but skip traversal.
+			return v, dyn.ErrSkip
+		})
+	})
+
+	b.Config.ConfigureConfigFilePath()
+}
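
Usage, as exercised by the compute metadata test earlier in this change: locations are attached to the configuration tree after the test bundle is constructed, replacing the `paths.Paths` fields that were removed there:

    bundletest.SetLocation(b, "resources.jobs.my-job-1", "a/b/c")
    bundletest.SetLocation(b, "resources.jobs.my-job-2", "d/e/f")
    bundletest.SetLocation(b, "resources.pipelines.my-pipeline", "abc")
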
@@ -20,7 +20,21 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error {
 	ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name()))

 	log.Debugf(ctx, "Apply")
-	err := m.Apply(ctx, b)
+
+	err := b.Config.MarkMutatorEntry(ctx)
+	if err != nil {
+		log.Errorf(ctx, "entry error: %s", err)
+		return err
+	}
+
+	defer func() {
+		err := b.Config.MarkMutatorExit(ctx)
+		if err != nil {
+			log.Errorf(ctx, "exit error: %s", err)
+		}
+	}()
+
+	err = m.Apply(ctx, b)
 	if err != nil {
 		log.Errorf(ctx, "Error: %s", err)
 		return err
@@ -28,3 +42,20 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) error {

 	return nil
 }
+
+type funcMutator struct {
+	fn func(context.Context, *Bundle) error
+}
+
+func (m funcMutator) Name() string {
+	return "<func>"
+}
+
+func (m funcMutator) Apply(ctx context.Context, b *Bundle) error {
+	return m.fn(ctx, b)
+}
+
+// ApplyFunc applies an inline-specified function mutator.
+func ApplyFunc(ctx context.Context, b *Bundle, fn func(context.Context, *Bundle) error) error {
+	return Apply(ctx, b, funcMutator{fn})
+}
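
`ApplyFunc` gives ad-hoc edits to the typed configuration the same entry/exit synchronization as a named mutator. A usage sketch matching the command hunks later in this change; the `true` literal stands in for whatever flag the caller wires up:

    bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
    	// Mutate typed config inside the mutator so the dynamic
    	// representation is re-synchronized on exit.
    	b.Config.Bundle.Deployment.Lock.Force = true
    	return nil
    })
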
@@ -4,7 +4,7 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/artifacts"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/bundle/config/interpolation"
+	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/scripts"
 )

@@ -18,8 +18,8 @@ func Build() bundle.Mutator {
 			artifacts.InferMissingProperties(),
 			artifacts.BuildAll(),
 			scripts.Execute(config.ScriptPostBuild),
-			interpolation.Interpolate(
-				interpolation.IncludeLookupsInPath("artifacts"),
+			mutator.ResolveVariableReferences(
+				"artifacts",
 			),
 		},
 	)
@@ -3,9 +3,7 @@ package phases
 import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/bundle/config/interpolation"
 	"github.com/databricks/cli/bundle/config/mutator"
-	"github.com/databricks/cli/bundle/config/variable"
 	"github.com/databricks/cli/bundle/deploy/metadata"
 	"github.com/databricks/cli/bundle/deploy/terraform"
 	"github.com/databricks/cli/bundle/permissions"
@@ -20,6 +18,10 @@ func Initialize() bundle.Mutator {
 	return newPhase(
 		"initialize",
 		[]bundle.Mutator{
+			mutator.RewriteSyncPaths(),
+			mutator.MergeJobClusters(),
+			mutator.MergeJobTasks(),
+			mutator.MergePipelineClusters(),
 			mutator.InitializeWorkspaceClient(),
 			mutator.PopulateCurrentUser(),
 			mutator.DefineDefaultWorkspaceRoot(),
@@ -27,10 +29,10 @@ func Initialize() bundle.Mutator {
 			mutator.DefineDefaultWorkspacePaths(),
 			mutator.SetVariables(),
 			mutator.ResolveResourceReferences(),
-			interpolation.Interpolate(
-				interpolation.IncludeLookupsInPath("bundle"),
-				interpolation.IncludeLookupsInPath("workspace"),
-				interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
+			mutator.ResolveVariableReferences(
+				"bundle",
+				"workspace",
+				"variables",
 			),
 			mutator.SetRunAs(),
 			mutator.OverrideCompute(),
@@ -5,30 +5,34 @@ import (
 	"testing"

 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/phases"
-	"github.com/databricks/cli/cmd/root"
+	"github.com/databricks/databricks-sdk-go/config"
+	"github.com/databricks/databricks-sdk-go/experimental/mocks"
 	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/stretchr/testify/mock"
 	"github.com/stretchr/testify/require"
 )

 func TestExpandPipelineGlobPathsWithNonExistent(t *testing.T) {
 	ctx := context.Background()
-	ctx = root.SetWorkspaceClient(ctx, nil)

 	b, err := bundle.Load(ctx, "./pipeline_glob_paths")
 	require.NoError(t, err)

-	err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
+	err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget("default")...))
 	require.NoError(t, err)
-	b.Config.Bundle.Target = "default"

-	b.Config.Workspace.CurrentUser = &config.User{User: &iam.User{UserName: "user@domain.com"}}
-	b.WorkspaceClient()
+	// Configure mock workspace client
+	m := mocks.NewMockWorkspaceClient(t)
+	m.WorkspaceClient.Config = &config.Config{
+		Host: "https://mock.databricks.workspace.com",
+	}
+	m.GetMockCurrentUserAPI().EXPECT().Me(mock.Anything).Return(&iam.User{
+		UserName: "user@domain.com",
+	}, nil)
+	b.SetWorkpaceClient(m.WorkspaceClient)

-	m := phases.Initialize()
-	err = bundle.Apply(ctx, b, m)
+	err = bundle.Apply(ctx, b, phases.Initialize())
 	require.Error(t, err)
 	require.ErrorContains(t, err, "notebook ./non-existent not found")
@@ -28,8 +28,6 @@ environments:

   pipelines:
     boolean1:
-      # Note: setting a property to a zero value (in Go) does not have effect.
-      # See the corresponding test for details.
       photon: false

     boolean2:
@@ -29,10 +29,7 @@ func TestEnvironmentOverridesResourcesStaging(t *testing.T) {
 	b := loadTarget(t, "./environment_overrides/resources", "staging")
 	assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name)

-	// Overrides are only applied if they are not zero-valued.
-	// This means that in its current form, we cannot override a true value with a false value.
-	// Note: this is not desirable and will be addressed by representing our configuration
-	// in a different structure (e.g. with cty), instead of Go structs.
-	assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon)
+	// Override values are applied in the staging environment.
+	assert.Equal(t, false, b.Config.Resources.Pipelines["boolean1"].Photon)
 	assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon)
 }
@@ -5,16 +5,16 @@ import (
 	"testing"

 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/interpolation"
+	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )

 func TestInterpolation(t *testing.T) {
 	b := load(t, "./interpolation")
-	err := bundle.Apply(context.Background(), b, interpolation.Interpolate(
-		interpolation.IncludeLookupsInPath("bundle"),
-		interpolation.IncludeLookupsInPath("workspace"),
+	err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences(
+		"bundle",
+		"workspace",
 	))
 	require.NoError(t, err)
 	assert.Equal(t, "foo bar", b.Config.Bundle.Name)
@@ -23,9 +23,9 @@ func TestInterpolation(t *testing.T) {

 func TestInterpolationWithTarget(t *testing.T) {
 	b := loadTarget(t, "./interpolation_target", "development")
-	err := bundle.Apply(context.Background(), b, interpolation.Interpolate(
-		interpolation.IncludeLookupsInPath("bundle"),
-		interpolation.IncludeLookupsInPath("workspace"),
+	err := bundle.Apply(context.Background(), b, mutator.ResolveVariableReferences(
+		"bundle",
+		"workspace",
 	))
 	require.NoError(t, err)
 	assert.Equal(t, "foo bar", b.Config.Bundle.Name)
@@ -4,6 +4,7 @@ import (
 	"testing"

 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )

 func TestJobWithSparkConf(t *testing.T) {
@@ -14,9 +15,17 @@ func TestJobWithSparkConf(t *testing.T) {
 	assert.Len(t, job.JobClusters, 1)
 	assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey)

-	// Existing behavior is such that including non-string values
-	// in the spark_conf map will cause the job to fail to load.
-	// This is expected to be solved once we switch to the custom YAML loader.
-	tasks := job.Tasks
-	assert.Len(t, tasks, 0, "see https://github.com/databricks/cli/issues/992")
+	// This test exists because of https://github.com/databricks/cli/issues/992.
+	// It is solved for bundles as of https://github.com/databricks/cli/pull/1098.
+	require.Len(t, job.JobClusters, 1)
+	cluster := job.JobClusters[0]
+	assert.Equal(t, "14.2.x-scala2.12", cluster.NewCluster.SparkVersion)
+	assert.Equal(t, "i3.xlarge", cluster.NewCluster.NodeTypeId)
+	assert.Equal(t, 2, cluster.NewCluster.NumWorkers)
+	assert.Equal(t, map[string]string{
+		"spark.string": "string",
+		"spark.int":    "1",
+		"spark.bool":   "true",
+		"spark.float":  "1.2",
+	}, cluster.NewCluster.SparkConf)
 }

@@ -19,8 +19,17 @@ func load(t *testing.T, path string) *bundle.Bundle {
 }

 func loadTarget(t *testing.T, path, env string) *bundle.Bundle {
-	b := load(t, path)
-	err := bundle.Apply(context.Background(), b, mutator.SelectTarget(env))
+	ctx := context.Background()
+	b, err := bundle.Load(ctx, path)
+	require.NoError(t, err)
+	err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutatorsForTarget(env)...))
+	require.NoError(t, err)
+	err = bundle.Apply(ctx, b, bundle.Seq(
+		mutator.RewriteSyncPaths(),
+		mutator.MergeJobClusters(),
+		mutator.MergeJobTasks(),
+		mutator.MergePipelineClusters(),
+	))
 	require.NoError(t, err)
 	return b
 }
@@ -1,40 +1,38 @@
 package config_tests

 import (
+	"path/filepath"
 	"testing"

+	"github.com/databricks/cli/bundle"
 	"github.com/stretchr/testify/assert"
 )

 func TestOverrideSyncTarget(t *testing.T) {
-	b := load(t, "./override_sync")
-	assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include)
-	assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
+	var b *bundle.Bundle

 	b = loadTarget(t, "./override_sync", "development")
-	assert.ElementsMatch(t, []string{"src/*", "tests/*"}, b.Config.Sync.Include)
-	assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)

 	b = loadTarget(t, "./override_sync", "staging")
-	assert.ElementsMatch(t, []string{"src/*", "fixtures/*"}, b.Config.Sync.Include)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("src/*"), filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
 	assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)

 	b = loadTarget(t, "./override_sync", "prod")
-	assert.ElementsMatch(t, []string{"src/*"}, b.Config.Sync.Include)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("src/*")}, b.Config.Sync.Include)
 	assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
 }

 func TestOverrideSyncTargetNoRootSync(t *testing.T) {
-	b := load(t, "./override_sync_no_root")
-	assert.ElementsMatch(t, []string{}, b.Config.Sync.Include)
-	assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)
+	var b *bundle.Bundle

 	b = loadTarget(t, "./override_sync_no_root", "development")
-	assert.ElementsMatch(t, []string{"tests/*"}, b.Config.Sync.Include)
-	assert.ElementsMatch(t, []string{"dist"}, b.Config.Sync.Exclude)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("tests/*")}, b.Config.Sync.Include)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("dist")}, b.Config.Sync.Exclude)

 	b = loadTarget(t, "./override_sync_no_root", "staging")
-	assert.ElementsMatch(t, []string{"fixtures/*"}, b.Config.Sync.Include)
+	assert.ElementsMatch(t, []string{filepath.FromSlash("fixtures/*")}, b.Config.Sync.Include)
 	assert.ElementsMatch(t, []string{}, b.Config.Sync.Exclude)

 	b = loadTarget(t, "./override_sync_no_root", "prod")
@@ -11,7 +11,7 @@ import (
 )

 func TestRelativePathsWithIncludes(t *testing.T) {
-	b := load(t, "./relative_path_with_includes")
+	b := loadTarget(t, "./relative_path_with_includes", "default")

 	m := mutator.TranslatePaths()
 	err := bundle.Apply(context.Background(), b, m)
@@ -20,8 +20,22 @@ func TestRelativePathsWithIncludes(t *testing.T) {
 	assert.Equal(t, "artifact_a", b.Config.Artifacts["test_a"].Path)
 	assert.Equal(t, filepath.Join("subfolder", "artifact_b"), b.Config.Artifacts["test_b"].Path)

-	assert.ElementsMatch(t, []string{"./folder_a/*.*", filepath.Join("subfolder", "folder_c", "*.*")}, b.Config.Sync.Include)
-	assert.ElementsMatch(t, []string{"./folder_b/*.*", filepath.Join("subfolder", "folder_d", "*.*")}, b.Config.Sync.Exclude)
+	assert.ElementsMatch(
+		t,
+		[]string{
+			filepath.Join("folder_a", "*.*"),
+			filepath.Join("subfolder", "folder_c", "*.*"),
+		},
+		b.Config.Sync.Include,
+	)
+	assert.ElementsMatch(
+		t,
+		[]string{
+			filepath.Join("folder_b", "*.*"),
+			filepath.Join("subfolder", "folder_d", "*.*"),
+		},
+		b.Config.Sync.Exclude,
+	)

 	assert.Equal(t, filepath.Join("dist", "job_a.whl"), b.Config.Resources.Jobs["job_a"].Tasks[0].Libraries[0].Whl)
 	assert.Equal(t, filepath.Join("subfolder", "dist", "job_b.whl"), b.Config.Resources.Jobs["job_b"].Tasks[0].Libraries[0].Whl)
@@ -13,30 +13,42 @@ targets:
 resources:
   pipelines:
     nyc_taxi_pipeline:
+      name: "nyc taxi loader"
+
       permissions:
         - level: CAN_VIEW
           service_principal_name: my_service_principal
         - level: CAN_VIEW
          user_name: my_user_name
-      name: "nyc taxi loader"
       libraries:
         - notebook:
             path: ./dlt/nyc_taxi_loader

   jobs:
     job_one:
       name: Job One

       tasks:
-        - task:
+        - task_key: "task_one"
+          notebook_task:
             notebook_path: "./test.py"

     job_two:
       name: Job Two

       tasks:
-        - task:
+        - task_key: "task_two"
+          notebook_task:
             notebook_path: "./test.py"

     job_three:
       name: Job Three

       run_as:
         service_principal_name: "my_service_principal_for_job"

       tasks:
-        - task:
+        - task_key: "task_three"
+          notebook_task:
             notebook_path: "./test.py"
@@ -13,12 +13,17 @@ import (

 func TestRunAsDefault(t *testing.T) {
 	b := load(t, "./run_as")
+
+	ctx := context.Background()
+	bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
 		b.Config.Workspace.CurrentUser = &config.User{
 			User: &iam.User{
 				UserName: "jane@doe.com",
 			},
 		}
-	ctx := context.Background()
+		return nil
+	})

 	err := bundle.Apply(ctx, b, mutator.SetRunAs())
 	assert.NoError(t, err)
@@ -39,21 +44,26 @@ func TestRunAsDefault(t *testing.T) {

 	pipelines := b.Config.Resources.Pipelines
 	assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2)
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW")
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].UserName, "my_user_name")
+	assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level)
+	assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[0].UserName)

-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER")
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName, "my_service_principal")
+	assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level)
+	assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName)
 }

 func TestRunAsDevelopment(t *testing.T) {
 	b := loadTarget(t, "./run_as", "development")
+
+	ctx := context.Background()
+	bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
 		b.Config.Workspace.CurrentUser = &config.User{
 			User: &iam.User{
 				UserName: "jane@doe.com",
 			},
 		}
-	ctx := context.Background()
+		return nil
+	})

 	err := bundle.Apply(ctx, b, mutator.SetRunAs())
 	assert.NoError(t, err)
@@ -74,9 +84,9 @@ func TestRunAsDevelopment(t *testing.T) {

 	pipelines := b.Config.Resources.Pipelines
 	assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2)
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW")
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName, "my_service_principal")
+	assert.Equal(t, "CAN_VIEW", pipelines["nyc_taxi_pipeline"].Permissions[0].Level)
+	assert.Equal(t, "my_service_principal", pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName)

-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER")
-	assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].UserName, "my_user_name")
+	assert.Equal(t, "IS_OWNER", pipelines["nyc_taxi_pipeline"].Permissions[1].Level)
+	assert.Equal(t, "my_user_name", pipelines["nyc_taxi_pipeline"].Permissions[1].UserName)
 }
@@ -5,9 +5,7 @@ import (
 	"testing"

 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/interpolation"
 	"github.com/databricks/cli/bundle/config/mutator"
-	"github.com/databricks/cli/bundle/config/variable"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -17,9 +15,10 @@ func TestVariables(t *testing.T) {
 	b := load(t, "./variables/vanilla")
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	require.NoError(t, err)
 	assert.Equal(t, "abc def", b.Config.Bundle.Name)
 }
@@ -28,9 +27,10 @@ func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) {
 	b := load(t, "./variables/vanilla")
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
 }

@@ -39,9 +39,10 @@ func TestVariablesTargetsBlockOverride(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-with-single-variable-override"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	require.NoError(t, err)
 	assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile)
 }
@@ -51,9 +52,10 @@ func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-with-two-variable-overrides"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	require.NoError(t, err)
 	assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile)
 }
@@ -64,9 +66,10 @@ func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-with-two-variable-overrides"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	require.NoError(t, err)
 	assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile)
 }
@@ -76,9 +79,10 @@ func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-missing-a-required-variable-assignment"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
 }

@@ -87,9 +91,10 @@ func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-using-an-undefined-variable"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
 	assert.ErrorContains(t, err, "variable c is not defined but is assigned a value")
 }

@@ -110,9 +115,7 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) {
 	err := bundle.Apply(context.Background(), b, bundle.Seq(
 		mutator.SelectTarget("env-overrides-lookup"),
 		mutator.SetVariables(),
-		interpolation.Interpolate(
-			interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
-		)))
+	))
 	require.NoError(t, err)
 	assert.Equal(t, "cluster: some-test-cluster", b.Config.Variables["d"].Lookup.String())
 	assert.Equal(t, "instance-pool: some-test-instance-pool", b.Config.Variables["e"].Lookup.String())
@@ -1,6 +1,8 @@
 package bundle

 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/phases"
 	"github.com/databricks/cli/cmd/bundle/utils"
@@ -24,8 +26,10 @@ func newDeployCommand() *cobra.Command {
 	cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.")

 	cmd.RunE = func(cmd *cobra.Command, args []string) error {
-		b := bundle.Get(cmd.Context())
+		ctx := cmd.Context()
+		b := bundle.Get(ctx)
+
+		bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error {
 			b.Config.Bundle.Force = force
 			b.Config.Bundle.Deployment.Lock.Force = forceLock
 			b.Config.Bundle.ComputeID = computeID
@@ -34,7 +38,10 @@ func newDeployCommand() *cobra.Command {
 				b.Config.Bundle.Deployment.FailOnActiveRuns = failOnActiveRuns
 			}

-		return bundle.Apply(cmd.Context(), b, bundle.Seq(
+			return nil
+		})
+
+		return bundle.Apply(ctx, b, bundle.Seq(
 			phases.Initialize(),
 			phases.Build(),
 			phases.Deploy(),
@@ -1,6 +1,7 @@
 package deployment

 import (
+	"context"
 	"fmt"

 	"github.com/databricks/cli/bundle"
@@ -25,15 +26,14 @@ func newBindCommand() *cobra.Command {
 	cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.")

 	cmd.RunE = func(cmd *cobra.Command, args []string) error {
-		b := bundle.Get(cmd.Context())
-		r := b.Config.Resources
-		resource, err := r.FindResourceByConfigKey(args[0])
+		ctx := cmd.Context()
+		b := bundle.Get(ctx)
+		resource, err := b.Config.Resources.FindResourceByConfigKey(args[0])
 		if err != nil {
 			return err
 		}

 		w := b.WorkspaceClient()
-		ctx := cmd.Context()
 		exists, err := resource.Exists(ctx, w, args[1])
 		if err != nil {
 			return fmt.Errorf("failed to fetch the resource, err: %w", err)
@@ -43,8 +43,12 @@ func newBindCommand() *cobra.Command {
 			return fmt.Errorf("%s with an id '%s' is not found", resource.TerraformResourceName(), args[1])
 		}

+		bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error {
 			b.Config.Bundle.Deployment.Lock.Force = forceLock
-		err = bundle.Apply(cmd.Context(), b, bundle.Seq(
+			return nil
+		})
+
+		err = bundle.Apply(ctx, b, bundle.Seq(
 			phases.Initialize(),
 			phases.Bind(&terraform.BindOptions{
 				AutoApprove: autoApprove,
@@ -1,6 +1,8 @@
 package deployment

 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/phases"
 	"github.com/databricks/cli/cmd/bundle/utils"
@@ -19,14 +21,18 @@ func newUnbindCommand() *cobra.Command {
 	cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.")

 	cmd.RunE = func(cmd *cobra.Command, args []string) error {
-		b := bundle.Get(cmd.Context())
-		r := b.Config.Resources
-		resource, err := r.FindResourceByConfigKey(args[0])
+		ctx := cmd.Context()
+		b := bundle.Get(ctx)
+		resource, err := b.Config.Resources.FindResourceByConfigKey(args[0])
 		if err != nil {
 			return err
 		}

+		bundle.ApplyFunc(ctx, b, func(context.Context, *bundle.Bundle) error {
 			b.Config.Bundle.Deployment.Lock.Force = forceLock
+			return nil
+		})
+
 		return bundle.Apply(cmd.Context(), b, bundle.Seq(
 			phases.Initialize(),
 			phases.Unbind(resource.TerraformResourceName(), args[0]),
@@ -1,6 +1,7 @@
 package bundle
 
 import (
+	"context"
 	"fmt"
 	"os"
 
@@ -30,12 +31,16 @@ func newDestroyCommand() *cobra.Command {
 		ctx := cmd.Context()
 		b := bundle.Get(ctx)
 
+		bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
 			// If `--force-lock` is specified, force acquisition of the deployment lock.
 			b.Config.Bundle.Deployment.Lock.Force = forceDestroy
 
 			// If `--auto-approve`` is specified, we skip confirmation checks
 			b.AutoApprove = autoApprove
 
+			return nil
+		})
+
 		// we require auto-approve for non tty terminals since interactive consent
 		// is not possible
 		if !term.IsTerminal(int(os.Stderr.Fd())) && !autoApprove {
@@ -1,6 +1,8 @@
 package utils
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/cmd/root"
 	"github.com/spf13/cobra"
@@ -20,5 +22,7 @@ func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error {
 
 	// Initialize variables by assigning them values passed as command line flags
 	b := bundle.Get(cmd.Context())
+	return bundle.ApplyFunc(cmd.Context(), b, func(ctx context.Context, b *bundle.Bundle) error {
 		return b.Config.InitializeVariables(variables)
+	})
 }
@@ -6,6 +6,7 @@ import (
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/phases"
 	"github.com/databricks/cli/cmd/bundle/utils"
+	"github.com/databricks/cli/libs/log"
 	"github.com/spf13/cobra"
 )
@@ -25,6 +26,12 @@ func newValidateCommand() *cobra.Command {
 			return err
 		}
 
+		// Until we change up the output of this command to be a text representation,
+		// we'll just output all diagnostics as debug logs.
+		for _, diag := range b.Config.Diagnostics() {
+			log.Debugf(cmd.Context(), "[%s]: %s", diag.Location, diag.Summary)
+		}
+
 		buf, err := json.MarshalIndent(b.Config, "", "  ")
 		if err != nil {
 			return err
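The loop added here drains the diagnostics collected while loading the dynamic configuration. A hedged sketch of another consumer, assuming only the `Location` and `Summary` fields that the hunk above itself uses (the function name is illustrative):

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/bundle"
)

// printDiagnostics sketches rendering loader diagnostics to stdout instead
// of debug logs. Only the Location and Summary fields used in the hunk
// above are assumed to exist on the diagnostic type.
func printDiagnostics(b *bundle.Bundle) {
	for _, diag := range b.Config.Diagnostics() {
		fmt.Printf("[%s]: %s\n", diag.Location, diag.Summary)
	}
}
```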
@@ -64,7 +64,13 @@ func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context
 
 	profile := getProfile(cmd)
 	if profile != "" {
+		err = bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
 			b.Config.Workspace.Profile = profile
+			return nil
+		})
+		if err != nil {
+			return nil, err
+		}
 	}
 
 	err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
go.mod
@@ -12,7 +12,6 @@ require (
 	github.com/hashicorp/hc-install v0.6.3 // MPL 2.0
 	github.com/hashicorp/terraform-exec v0.20.0 // MPL 2.0
 	github.com/hashicorp/terraform-json v0.21.0 // MPL 2.0
-	github.com/imdario/mergo v0.3.15 // BSD-3-Clause
 	github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause
 	github.com/mattn/go-isatty v0.0.20 // MIT
 	github.com/nwidger/jsoncolor v0.3.2 // MIT
@@ -106,8 +106,6 @@ github.com/hashicorp/terraform-exec v0.20.0 h1:DIZnPsqzPGuUnq6cH8jWcPunBfY+C+M8J
 github.com/hashicorp/terraform-exec v0.20.0/go.mod h1:ckKGkJWbsNqFKV1itgMnE0hY9IYf1HoiekpuN0eWoDw=
 github.com/hashicorp/terraform-json v0.21.0 h1:9NQxbLNqPbEMze+S6+YluEdXgJmhQykRyRNd+zTI05U=
 github.com/hashicorp/terraform-json v0.21.0/go.mod h1:qdeBs11ovMzo5puhrRibdD6d2Dq6TyE/28JiU4tIQxk=
-github.com/imdario/mergo v0.3.15 h1:M8XP7IuFNsqUx6VPK2P9OSmsYsI/YFaGil0uD21V3dM=
-github.com/imdario/mergo v0.3.15/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY=
 github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
 github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
 github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
@@ -33,15 +33,6 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 	whlPath := filepath.Join(dir, "dist", "test.whl")
 	touchEmptyFile(t, whlPath)
 
-	artifact := &config.Artifact{
-		Type: "whl",
-		Files: []config.ArtifactFile{
-			{
-				Source: whlPath,
-			},
-		},
-	}
-
 	wsDir := internal.TemporaryWorkspaceDir(t, w)
 
 	b := &bundle.Bundle{
@@ -54,7 +45,14 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 			ArtifactPath: wsDir,
 		},
 		Artifacts: config.Artifacts{
-			"test": artifact,
+			"test": &config.Artifact{
+				Type: "whl",
+				Files: []config.ArtifactFile{
+					{
+						Source: whlPath,
+					},
+				},
+			},
 		},
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
@@ -80,9 +78,14 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 	require.NoError(t, err)
 
 	// The remote path attribute on the artifact file should have been set.
-	require.Regexp(t, regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), artifact.Files[0].RemotePath)
+	require.Regexp(t,
+		regexp.MustCompile(path.Join(regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
+		b.Config.Artifacts["test"].Files[0].RemotePath,
+	)
 
 	// The task library path should have been updated to the remote path.
-	lib := b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0]
-	require.Regexp(t, regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)), lib.Whl)
+	require.Regexp(t,
+		regexp.MustCompile(path.Join("/Workspace", regexp.QuoteMeta(wsDir), `.internal/test\.whl`)),
+		b.Config.Resources.Jobs["test"].JobSettings.Tasks[0].Libraries[0].Whl,
+	)
 }
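The test rewrite above is a consequence of the round-trip between representations: because mutator entry and exit convert between the dynamic value and the typed structs, a `*config.Artifact` captured before `bundle.Apply` may no longer alias the live configuration afterwards. A sketch of the resulting idiom, assuming the `bundle.Mutator` interface that `Apply` accepts (the helper name and mutator argument are stand-ins for the phases under test):

```go
package main

import (
	"context"

	"github.com/databricks/cli/bundle"
)

// remoteWhlPath sketches the post-change idiom: run the mutator, then read
// state back through b.Config rather than through a pre-Apply pointer.
func remoteWhlPath(ctx context.Context, b *bundle.Bundle, m bundle.Mutator) (string, error) {
	if err := bundle.Apply(ctx, b, m); err != nil {
		return "", err
	}
	return b.Config.Artifacts["test"].Files[0].RemotePath, nil
}
```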
@@ -0,0 +1,67 @@
+package merge
+
+import "github.com/databricks/cli/libs/dyn"
+
+type elementsByKey struct {
+	key     string
+	keyFunc func(dyn.Value) string
+}
+
+func (e elementsByKey) Map(v dyn.Value) (dyn.Value, error) {
+	// We know the type of this value is a sequence.
+	// For additional defence, return self if it is not.
+	elements, ok := v.AsSequence()
+	if !ok {
+		return v, nil
+	}
+
+	seen := make(map[string]dyn.Value, len(elements))
+	keys := make([]string, 0, len(elements))
+
+	// Iterate in natural order. For a given key, we first see the
+	// base definition and merge instances that come after it.
+	for i := range elements {
+		kv := elements[i].Get(e.key)
+		key := e.keyFunc(kv)
+
+		// Register element with key if not yet seen before.
+		ref, ok := seen[key]
+		if !ok {
+			keys = append(keys, key)
+			seen[key] = elements[i]
+			continue
+		}
+
+		// Merge this instance into the reference.
+		nv, err := Merge(ref, elements[i])
+		if err != nil {
+			return v, err
+		}
+
+		// Overwrite reference.
+		seen[key] = nv
+	}
+
+	// Gather resulting elements in natural order.
+	out := make([]dyn.Value, 0, len(keys))
+	for _, key := range keys {
+		nv, err := dyn.Set(seen[key], e.key, dyn.V(key))
+		if err != nil {
+			return dyn.InvalidValue, err
+		}
+		out = append(out, nv)
+	}
+
+	return dyn.NewValue(out, v.Location()), nil
+}
+
+// ElementsByKey returns a [dyn.MapFunc] that operates on a sequence
+// where each element is a map. It groups elements by a key and merges
+// elements with the same key.
+//
+// The function that extracts the key from an element is provided as
+// a parameter. The resulting elements get their key field overwritten
+// with the value as returned by the key function.
+func ElementsByKey(key string, keyFunc func(dyn.Value) string) dyn.MapFunc {
+	return elementsByKey{key, keyFunc}.Map
+}
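This new `ElementsByKey` helper is what backs the job cluster, job task, and pipeline cluster merge mutators that this change breaks out. Instances that come after the base definition are merged into it, in order of appearance. A sketch of how it could be wired up for job clusters, using only the `MapByPath` and `MustString` calls exercised in the test that follows (the `job_cluster_key` field name comes from the bundle schema; the function name is illustrative):

```go
package merge

import "github.com/databricks/cli/libs/dyn"

// mergeJobClusters sketches merging a sequence of job cluster definitions
// that share a job_cluster_key, using the ElementsByKey helper above.
func mergeJobClusters(clusters dyn.Value) (dyn.Value, error) {
	return dyn.MapByPath(clusters, dyn.EmptyPath, ElementsByKey("job_cluster_key", func(v dyn.Value) string {
		return v.MustString()
	}))
}
```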
@@ -0,0 +1,52 @@
+package merge
+
+import (
+	"strings"
+	"testing"
+
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestElementByKey(t *testing.T) {
+	vin := dyn.V([]dyn.Value{
+		dyn.V(map[string]dyn.Value{
+			"key":   dyn.V("foo"),
+			"value": dyn.V(42),
+		}),
+		dyn.V(map[string]dyn.Value{
+			"key":   dyn.V("bar"),
+			"value": dyn.V(43),
+		}),
+		dyn.V(map[string]dyn.Value{
+			// Use upper case key to test that the resulting element has its
+			// key field assigned to the output of the key function.
+			// The key function in this test returns the lower case version of the key.
+			"key":   dyn.V("FOO"),
+			"value": dyn.V(44),
+		}),
+	})
+
+	keyFunc := func(v dyn.Value) string {
+		return strings.ToLower(v.MustString())
+	}
+
+	vout, err := dyn.MapByPath(vin, dyn.EmptyPath, ElementsByKey("key", keyFunc))
+	require.NoError(t, err)
+	assert.Len(t, vout.MustSequence(), 2)
+	assert.Equal(t,
+		vout.Index(0).AsAny(),
+		map[string]any{
+			"key":   "foo",
+			"value": 44,
+		},
+	)
+	assert.Equal(t,
+		vout.Index(1).AsAny(),
+		map[string]any{
+			"key":   "bar",
+			"value": 43,
+		},
+	)
+}
@@ -42,6 +42,15 @@ func NewValue(v any, loc Location) Value {
 	}
 }
 
+// WithLocation returns a new Value with its location set to the given value.
+func (v Value) WithLocation(loc Location) Value {
+	return Value{
+		v: v.v,
+		k: v.k,
+		l: loc,
+	}
+}
+
 func (v Value) Kind() Kind {
 	return v.k
 }
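`WithLocation` complements `NewValue`: it copies the underlying value and kind and replaces only the location. A small sketch; the `File`, `Line`, and `Column` fields on `dyn.Location` are assumptions based on the change description, not confirmed by this diff:

```go
package main

import "github.com/databricks/cli/libs/dyn"

// relocate sketches WithLocation: the returned value carries the same
// underlying value and kind, but reports the new location.
// The Location field names here are assumptions, per the change notes.
func relocate() dyn.Value {
	v := dyn.NewValue("single-node", dyn.Location{File: "databricks.yml", Line: 12, Column: 3})
	return v.WithLocation(dyn.Location{File: "override.yml", Line: 4, Column: 3})
}
```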
@@ -66,7 +66,11 @@ func assertBuiltinTemplateValid(t *testing.T, settings map[string]any, target st
 	require.NoError(t, err)
 
 	// Apply initialize / validation mutators
+	bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) error {
 		b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser}
+		return nil
+	})
+
 	b.Tagging = tags.ForCloud(w.Config)
 	b.WorkspaceClient()
 	b.Config.Bundle.Terraform = &bundleConfig.Terraform{