mirror of https://github.com/databricks/cli.git
Merge branch 'main' into transform-wheel-task
commit b10d50f521
@@ -28,7 +28,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v3
         with:
-          go-version: 1.19.5
+          go-version: 1.21.0
           cache: true
 
       - name: Set go env
@@ -56,9 +56,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v3
         with:
-          # Use 1.19 because of godoc formatting.
-          # See https://tip.golang.org/doc/go1.19#go-doc.
-          go-version: 1.19
+          go-version: 1.21
 
           # No need to download cached dependencies when running gofmt.
           cache: false

@@ -22,7 +22,7 @@ jobs:
         id: go
         uses: actions/setup-go@v3
         with:
-          go-version: 1.19.5
+          go-version: 1.21.0
 
       - name: Locate cache paths
         id: cache

@@ -21,7 +21,7 @@ jobs:
         id: go
         uses: actions/setup-go@v3
         with:
-          go-version: 1.19.5
+          go-version: 1.21.0
 
       - name: Locate cache paths
         id: cache
CHANGELOG.md
@@ -1,5 +1,65 @@
 # Version changelog
 
+## 0.203.1
+
+CLI:
+* Always resolve .databrickscfg file ([#659](https://github.com/databricks/cli/pull/659)).
+
+Bundles:
+* Add internal tag for bundle fields to be skipped from schema ([#636](https://github.com/databricks/cli/pull/636)).
+* Log the bundle root configuration file if applicable ([#657](https://github.com/databricks/cli/pull/657)).
+* Execute paths without the .tmpl extension as templates ([#654](https://github.com/databricks/cli/pull/654)).
+* Enable environment overrides for job clusters ([#658](https://github.com/databricks/cli/pull/658)).
+* Merge artifacts and resources block with overrides enabled ([#660](https://github.com/databricks/cli/pull/660)).
+* Locked terraform binary version to <= 1.5.5 ([#666](https://github.com/databricks/cli/pull/666)).
+* Return better error messages for invalid JSON schema types in templates ([#661](https://github.com/databricks/cli/pull/661)).
+* Use custom prompter for bundle template inputs ([#663](https://github.com/databricks/cli/pull/663)).
+* Add map and pair helper functions for bundle templates ([#665](https://github.com/databricks/cli/pull/665)).
+* Correct name for force acquire deploy flag ([#656](https://github.com/databricks/cli/pull/656)).
+* Confirm that override with a zero value doesn't work ([#669](https://github.com/databricks/cli/pull/669)).
+
+Internal:
+* Consolidate functions in libs/git ([#652](https://github.com/databricks/cli/pull/652)).
+* Upgraded Go version to 1.21 ([#664](https://github.com/databricks/cli/pull/664)).
+
+## 0.203.0
+
+CLI:
+* Infer host from profile during `auth login` ([#629](https://github.com/databricks/cli/pull/629)).
+
+Bundles:
+* Extend deployment mode support ([#577](https://github.com/databricks/cli/pull/577)).
+* Add validation for Git settings in bundles ([#578](https://github.com/databricks/cli/pull/578)).
+* Only treat files with .tmpl extension as templates ([#594](https://github.com/databricks/cli/pull/594)).
+* Add JSON schema validation for input template parameters ([#598](https://github.com/databricks/cli/pull/598)).
+* Add DATABRICKS_BUNDLE_INCLUDE_PATHS to specify include paths through env vars ([#591](https://github.com/databricks/cli/pull/591)).
+* Initialise a empty default bundle if BUNDLE_ROOT and DATABRICKS_BUNDLE_INCLUDES env vars are present ([#604](https://github.com/databricks/cli/pull/604)).
+* Regenerate bundle resource structs from latest Terraform provider ([#633](https://github.com/databricks/cli/pull/633)).
+* Fixed processing jobs libraries with remote path ([#638](https://github.com/databricks/cli/pull/638)).
+* Add unit test for file name execution during rendering ([#640](https://github.com/databricks/cli/pull/640)).
+* Add bundle init command and support for prompting user for input values ([#631](https://github.com/databricks/cli/pull/631)).
+* Fix bundle git branch validation ([#645](https://github.com/databricks/cli/pull/645)).
+
+Internal:
+* Fix mkdir integration test on GCP ([#620](https://github.com/databricks/cli/pull/620)).
+* Fix git clone integration test for non-existing repo ([#610](https://github.com/databricks/cli/pull/610)).
+* Remove push to main trigger for build workflow ([#621](https://github.com/databricks/cli/pull/621)).
+* Remove workflow to publish binaries to S3 ([#622](https://github.com/databricks/cli/pull/622)).
+* Fix failing fs mkdir test on azure ([#627](https://github.com/databricks/cli/pull/627)).
+* Print y/n options when displaying prompts using cmdio.Ask ([#650](https://github.com/databricks/cli/pull/650)).
+
+API Changes:
+* Changed `databricks account metastore-assignments create` command to not return anything.
+* Added `databricks account network-policy` command group.
+
+OpenAPI commit 7b57ba3a53f4de3d049b6a24391fe5474212daf8 (2023-07-28)
+
+Dependency updates:
+* Bump OpenAPI specification & Go SDK Version ([#624](https://github.com/databricks/cli/pull/624)).
+* Bump golang.org/x/term from 0.10.0 to 0.11.0 ([#643](https://github.com/databricks/cli/pull/643)).
+* Bump golang.org/x/text from 0.11.0 to 0.12.0 ([#642](https://github.com/databricks/cli/pull/642)).
+* Bump golang.org/x/oauth2 from 0.10.0 to 0.11.0 ([#641](https://github.com/databricks/cli/pull/641)).
+
 ## 0.202.0
 
 Breaking Change:
@@ -4,9 +4,10 @@ import (
     "context"
     "fmt"
 
+    "slices"
+
     "github.com/databricks/cli/bundle"
     "golang.org/x/exp/maps"
-    "golang.org/x/exp/slices"
 )
 
 // all is an internal proxy for producing a list of mutators for all artifacts.
@@ -10,7 +10,9 @@ import (
 
     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config"
+    "github.com/databricks/cli/bundle/libraries"
     "github.com/databricks/cli/libs/cmdio"
+    "github.com/databricks/cli/libs/log"
 )
 
 type detectPkg struct {
@@ -25,6 +27,11 @@ func (m *detectPkg) Name() string {
 }
 
 func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error {
+    wheelTasks := libraries.FindAllWheelTasks(b)
+    if len(wheelTasks) == 0 {
+        log.Infof(ctx, "No wheel tasks in databricks.yml config, skipping auto detect")
+        return nil
+    }
     cmdio.LogString(ctx, "artifacts.whl.AutoDetect: Detecting Python wheel project...")
 
     // checking if there is setup.py in the bundle root
@@ -7,6 +7,7 @@
 package bundle
 
 import (
+    "context"
     "fmt"
     "os"
     "path/filepath"
@@ -16,12 +17,15 @@ import (
     "github.com/databricks/cli/folders"
     "github.com/databricks/cli/libs/git"
     "github.com/databricks/cli/libs/locker"
+    "github.com/databricks/cli/libs/log"
     "github.com/databricks/cli/libs/terraform"
     "github.com/databricks/databricks-sdk-go"
     sdkconfig "github.com/databricks/databricks-sdk-go/config"
     "github.com/hashicorp/terraform-exec/tfexec"
 )
 
+const internalFolder = ".internal"
+
 type Bundle struct {
     Config config.Root
 
@@ -45,7 +49,7 @@ type Bundle struct {
 
 const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES"
 
-func Load(path string) (*Bundle, error) {
+func Load(ctx context.Context, path string) (*Bundle, error) {
     bundle := &Bundle{}
     stat, err := os.Stat(path)
     if err != nil {
@@ -56,6 +60,7 @@ func Load(path string) (*Bundle, error) {
     _, hasIncludePathEnv := os.LookupEnv(ExtraIncludePathsKey)
     _, hasBundleRootEnv := os.LookupEnv(envBundleRoot)
     if hasIncludePathEnv && hasBundleRootEnv && stat.IsDir() {
+        log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path)
         bundle.Config = config.Root{
             Path: path,
             Bundle: config.Bundle{
@@ -66,6 +71,7 @@ func Load(path string) (*Bundle, error) {
         }
         return nil, err
     }
+    log.Debugf(ctx, "Loading bundle configuration from: %s", configFile)
     err = bundle.Config.Load(configFile)
     if err != nil {
         return nil, err
@@ -75,19 +81,19 @@ func Load(path string) (*Bundle, error) {
 
 // MustLoad returns a bundle configuration.
 // It returns an error if a bundle was not found or could not be loaded.
-func MustLoad() (*Bundle, error) {
+func MustLoad(ctx context.Context) (*Bundle, error) {
     root, err := mustGetRoot()
     if err != nil {
         return nil, err
     }
 
-    return Load(root)
+    return Load(ctx, root)
 }
 
 // TryLoad returns a bundle configuration if there is one, but doesn't fail if there isn't one.
 // It returns an error if a bundle was found but could not be loaded.
 // It returns a `nil` bundle if a bundle was not found.
-func TryLoad() (*Bundle, error) {
+func TryLoad(ctx context.Context) (*Bundle, error) {
     root, err := tryGetRoot()
     if err != nil {
         return nil, err
@@ -98,7 +104,7 @@ func TryLoad() (*Bundle, error) {
         return nil, nil
     }
 
-    return Load(root)
+    return Load(ctx, root)
 }
 
 func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient {
@@ -113,10 +119,10 @@ func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient {
 }
 
 // CacheDir returns directory to use for temporary files for this bundle.
-// Scoped to the bundle's environment.
+// Scoped to the bundle's target.
 func (b *Bundle) CacheDir(paths ...string) (string, error) {
-    if b.Config.Bundle.Environment == "" {
-        panic("environment not set")
+    if b.Config.Bundle.Target == "" {
+        panic("target not set")
     }
 
     cacheDirName, exists := os.LookupEnv("DATABRICKS_BUNDLE_TMP")
@@ -134,8 +140,8 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) {
     // Fixed components of the result path.
     parts := []string{
         cacheDirName,
-        // Scope with environment name.
-        b.Config.Bundle.Environment,
+        // Scope with target name.
+        b.Config.Bundle.Target,
     }
 
     // Append dynamic components of the result path.
@@ -151,6 +157,38 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) {
     return dir, nil
 }
 
+// This directory is used to store and automaticaly sync internal bundle files, such as, f.e
+// notebook trampoline files for Python wheel and etc.
+func (b *Bundle) InternalDir() (string, error) {
+    cacheDir, err := b.CacheDir()
+    if err != nil {
+        return "", err
+    }
+
+    dir := filepath.Join(cacheDir, internalFolder)
+    err = os.MkdirAll(dir, 0700)
+    if err != nil {
+        return dir, err
+    }
+
+    return dir, nil
+}
+
+// GetSyncIncludePatterns returns a list of user defined includes
+// And also adds InternalDir folder to include list for sync command
+// so this folder is always synced
+func (b *Bundle) GetSyncIncludePatterns() ([]string, error) {
+    internalDir, err := b.InternalDir()
+    if err != nil {
+        return nil, err
+    }
+    internalDirRel, err := filepath.Rel(b.Config.Path, internalDir)
+    if err != nil {
+        return nil, err
+    }
+    return append(b.Config.Sync.Include, filepath.ToSlash(filepath.Join(internalDirRel, "*.*"))), nil
+}
+
 func (b *Bundle) GitRepository() (*git.Repository, error) {
     rootPath, err := folders.FindDirWithLeaf(b.Config.Path, ".git")
     if err != nil {
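The hunks above thread a context.Context through Load, MustLoad and TryLoad, scope the cache directory by target instead of environment, and add the InternalDir and GetSyncIncludePatterns helpers. A minimal sketch of how a caller might exercise the new signatures; the "." bundle root and the "default" target name are illustrative assumptions, not taken from this diff:

package main

import (
    "context"
    "fmt"
    "log"

    "github.com/databricks/cli/bundle"
)

func main() {
    ctx := context.Background()

    // Load now takes a context so it can emit debug logs while resolving the configuration.
    b, err := bundle.Load(ctx, ".")
    if err != nil {
        log.Fatal(err)
    }

    // CacheDir panics if no target is selected; normally the SelectTarget mutator sets this.
    b.Config.Bundle.Target = "default"

    cacheDir, err := b.CacheDir()
    if err != nil {
        log.Fatal(err)
    }

    // InternalDir lives under the cache dir (".internal") and is created on demand.
    internalDir, err := b.InternalDir()
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(cacheDir, internalDir)
}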
@@ -1,6 +1,7 @@
 package bundle
 
 import (
+    "context"
     "os"
     "path/filepath"
     "testing"
@@ -10,13 +11,13 @@ import (
 )
 
 func TestLoadNotExists(t *testing.T) {
-    b, err := Load("/doesntexist")
+    b, err := Load(context.Background(), "/doesntexist")
     assert.True(t, os.IsNotExist(err))
     assert.Nil(t, b)
 }
 
 func TestLoadExists(t *testing.T) {
-    b, err := Load("./tests/basic")
+    b, err := Load(context.Background(), "./tests/basic")
     require.Nil(t, err)
     assert.Equal(t, "basic", b.Config.Bundle.Name)
 }
@@ -27,19 +28,19 @@ func TestBundleCacheDir(t *testing.T) {
     require.NoError(t, err)
     f1.Close()
 
-    bundle, err := Load(projectDir)
+    bundle, err := Load(context.Background(), projectDir)
     require.NoError(t, err)
 
-    // Artificially set environment.
-    // This is otherwise done by [mutators.SelectEnvironment].
-    bundle.Config.Bundle.Environment = "default"
+    // Artificially set target.
+    // This is otherwise done by [mutators.SelectTarget].
+    bundle.Config.Bundle.Target = "default"
 
     // unset env variable in case it's set
     t.Setenv("DATABRICKS_BUNDLE_TMP", "")
 
     cacheDir, err := bundle.CacheDir()
 
-    // format is <CWD>/.databricks/bundle/<environment>
+    // format is <CWD>/.databricks/bundle/<target>
     assert.NoError(t, err)
     assert.Equal(t, filepath.Join(projectDir, ".databricks", "bundle", "default"), cacheDir)
 }
@@ -51,58 +52,58 @@ func TestBundleCacheDirOverride(t *testing.T) {
     require.NoError(t, err)
     f1.Close()
 
-    bundle, err := Load(projectDir)
+    bundle, err := Load(context.Background(), projectDir)
     require.NoError(t, err)
 
-    // Artificially set environment.
-    // This is otherwise done by [mutators.SelectEnvironment].
-    bundle.Config.Bundle.Environment = "default"
+    // Artificially set target.
+    // This is otherwise done by [mutators.SelectTarget].
+    bundle.Config.Bundle.Target = "default"
 
     // now we expect to use 'bundleTmpDir' instead of CWD/.databricks/bundle
     t.Setenv("DATABRICKS_BUNDLE_TMP", bundleTmpDir)
 
     cacheDir, err := bundle.CacheDir()
 
-    // format is <DATABRICKS_BUNDLE_TMP>/<environment>
+    // format is <DATABRICKS_BUNDLE_TMP>/<target>
     assert.NoError(t, err)
     assert.Equal(t, filepath.Join(bundleTmpDir, "default"), cacheDir)
 }
 
 func TestBundleMustLoadSuccess(t *testing.T) {
     t.Setenv(envBundleRoot, "./tests/basic")
-    b, err := MustLoad()
+    b, err := MustLoad(context.Background())
     require.NoError(t, err)
     assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path))
 }
 
 func TestBundleMustLoadFailureWithEnv(t *testing.T) {
     t.Setenv(envBundleRoot, "./tests/doesntexist")
-    _, err := MustLoad()
+    _, err := MustLoad(context.Background())
     require.Error(t, err, "not a directory")
 }
 
 func TestBundleMustLoadFailureIfNotFound(t *testing.T) {
     chdir(t, t.TempDir())
-    _, err := MustLoad()
+    _, err := MustLoad(context.Background())
     require.Error(t, err, "unable to find bundle root")
 }
 
 func TestBundleTryLoadSuccess(t *testing.T) {
     t.Setenv(envBundleRoot, "./tests/basic")
-    b, err := TryLoad()
+    b, err := TryLoad(context.Background())
     require.NoError(t, err)
     assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path))
 }
 
 func TestBundleTryLoadFailureWithEnv(t *testing.T) {
     t.Setenv(envBundleRoot, "./tests/doesntexist")
-    _, err := TryLoad()
+    _, err := TryLoad(context.Background())
     require.Error(t, err, "not a directory")
 }
 
 func TestBundleTryLoadOkIfNotFound(t *testing.T) {
     chdir(t, t.TempDir())
-    b, err := TryLoad()
+    b, err := TryLoad(context.Background())
     assert.NoError(t, err)
     assert.Nil(t, b)
 }
@@ -15,7 +15,10 @@ type Bundle struct {
     // Default warehouse to run SQL on.
     // DefaultWarehouse string `json:"default_warehouse,omitempty"`
 
-    // Environment is set by the mutator that selects the environment.
+    // Target is set by the mutator that selects the target.
+    Target string `json:"target,omitempty" bundle:"readonly"`
+
+    // DEPRECATED. Left for backward compatibility with Target
     Environment string `json:"environment,omitempty" bundle:"readonly"`
 
     // Terraform holds configuration related to Terraform.
@@ -32,10 +35,10 @@ type Bundle struct {
     // origin url. Automatically loaded by reading .git directory if not specified
     Git Git `json:"git,omitempty"`
 
-    // Determines the mode of the environment.
+    // Determines the mode of the target.
     // For example, 'mode: development' can be used for deployments for
     // development purposes.
-    // Annotated readonly as this should be set at the environment level.
+    // Annotated readonly as this should be set at the target level.
     Mode Mode `json:"mode,omitempty" bundle:"readonly"`
 
     // Overrides the compute used for jobs and other supported assets.
@@ -9,10 +9,11 @@ import (
     "sort"
     "strings"
 
+    "slices"
+
     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config/variable"
     "golang.org/x/exp/maps"
-    "golang.org/x/exp/slices"
 )
 
 const Delimiter = "."

@@ -3,9 +3,8 @@ package interpolation
 import (
     "errors"
     "fmt"
+    "slices"
     "strings"
-
-    "golang.org/x/exp/slices"
 )
 
 // LookupFunction returns the value to rewrite a path expression to.
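Both interpolation files swap golang.org/x/exp/slices for the standard-library slices package introduced in Go 1.21, which is why the workflows above bump the toolchain. An illustrative snippet of the stdlib package the code now relies on (the sample data is made up):

package main

import (
    "fmt"
    "slices" // standard library since Go 1.21; previously golang.org/x/exp/slices
)

func main() {
    keys := []string{"workspace", "bundle", "variables"}
    slices.Sort(keys)

    fmt.Println(keys)                            // [bundle variables workspace]
    fmt.Println(slices.Contains(keys, "bundle")) // true
}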
@@ -1,37 +0,0 @@
-package mutator
-
-import (
-    "context"
-    "fmt"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/config"
-)
-
-type defineDefaultEnvironment struct {
-    name string
-}
-
-// DefineDefaultEnvironment adds an environment named "default"
-// to the configuration if none have been defined.
-func DefineDefaultEnvironment() bundle.Mutator {
-    return &defineDefaultEnvironment{
-        name: "default",
-    }
-}
-
-func (m *defineDefaultEnvironment) Name() string {
-    return fmt.Sprintf("DefineDefaultEnvironment(%s)", m.name)
-}
-
-func (m *defineDefaultEnvironment) Apply(_ context.Context, b *bundle.Bundle) error {
-    // Nothing to do if the configuration has at least 1 environment.
-    if len(b.Config.Environments) > 0 {
-        return nil
-    }
-
-    // Define default environment.
-    b.Config.Environments = make(map[string]*config.Environment)
-    b.Config.Environments[m.name] = &config.Environment{}
-    return nil
-}

@@ -0,0 +1,37 @@
+package mutator
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/databricks/cli/bundle"
+    "github.com/databricks/cli/bundle/config"
+)
+
+type defineDefaultTarget struct {
+    name string
+}
+
+// DefineDefaultTarget adds a target named "default"
+// to the configuration if none have been defined.
+func DefineDefaultTarget() bundle.Mutator {
+    return &defineDefaultTarget{
+        name: "default",
+    }
+}
+
+func (m *defineDefaultTarget) Name() string {
+    return fmt.Sprintf("DefineDefaultTarget(%s)", m.name)
+}
+
+func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) error {
+    // Nothing to do if the configuration has at least 1 target.
+    if len(b.Config.Targets) > 0 {
+        return nil
+    }
+
+    // Define default target.
+    b.Config.Targets = make(map[string]*config.Target)
+    b.Config.Targets[m.name] = &config.Target{}
+    return nil
+}
@@ -11,25 +11,25 @@ import (
     "github.com/stretchr/testify/require"
 )
 
-func TestDefaultEnvironment(t *testing.T) {
+func TestDefaultTarget(t *testing.T) {
     bundle := &bundle.Bundle{}
-    err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle)
+    err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle)
     require.NoError(t, err)
-    env, ok := bundle.Config.Environments["default"]
+    env, ok := bundle.Config.Targets["default"]
     assert.True(t, ok)
-    assert.Equal(t, &config.Environment{}, env)
+    assert.Equal(t, &config.Target{}, env)
 }
 
-func TestDefaultEnvironmentAlreadySpecified(t *testing.T) {
+func TestDefaultTargetAlreadySpecified(t *testing.T) {
     bundle := &bundle.Bundle{
         Config: config.Root{
-            Environments: map[string]*config.Environment{
+            Targets: map[string]*config.Target{
                 "development": {},
             },
         },
     }
-    err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle)
+    err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle)
     require.NoError(t, err)
-    _, ok := bundle.Config.Environments["default"]
+    _, ok := bundle.Config.Targets["default"]
     assert.False(t, ok)
 }
@@ -27,14 +27,14 @@ func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle
         return fmt.Errorf("unable to define default workspace root: bundle name not defined")
     }
 
-    if b.Config.Bundle.Environment == "" {
-        return fmt.Errorf("unable to define default workspace root: bundle environment not selected")
+    if b.Config.Bundle.Target == "" {
+        return fmt.Errorf("unable to define default workspace root: bundle target not selected")
     }
 
     b.Config.Workspace.RootPath = fmt.Sprintf(
         "~/.bundle/%s/%s",
         b.Config.Bundle.Name,
-        b.Config.Bundle.Environment,
+        b.Config.Bundle.Target,
     )
     return nil
 }

@@ -16,7 +16,7 @@ func TestDefaultWorkspaceRoot(t *testing.T) {
         Config: config.Root{
             Bundle: config.Bundle{
                 Name: "name",
-                Environment: "environment",
+                Target: "environment",
             },
         },
     }
@@ -24,17 +24,20 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error {
     if err != nil {
         return err
     }
-    // load branch name if undefined
-    if b.Config.Bundle.Git.Branch == "" {
 
     // Read branch name of current checkout
     branch, err := repo.CurrentBranch()
-    if err != nil {
-        log.Warnf(ctx, "failed to load current branch: %s", err)
-    } else {
-        b.Config.Bundle.Git.Branch = branch
+    if err == nil {
+        b.Config.Bundle.Git.ActualBranch = branch
+        if b.Config.Bundle.Git.Branch == "" {
+            // Only load branch if there's no user defined value
+            b.Config.Bundle.Git.Inferred = true
+            b.Config.Bundle.Git.Branch = branch
+        }
+    } else {
+        log.Warnf(ctx, "failed to load current branch: %s", err)
     }
 
     // load commit hash if undefined
     if b.Config.Bundle.Git.Commit == "" {
         commit, err := repo.LatestCommit()
@@ -7,11 +7,11 @@ import (
 func DefaultMutators() []bundle.Mutator {
     return []bundle.Mutator{
         ProcessRootIncludes(),
-        DefineDefaultEnvironment(),
+        DefineDefaultTarget(),
         LoadGitDetails(),
     }
 }
 
-func DefaultMutatorsForEnvironment(env string) []bundle.Mutator {
-    return append(DefaultMutators(), SelectEnvironment(env))
+func DefaultMutatorsForTarget(env string) []bundle.Mutator {
+    return append(DefaultMutators(), SelectTarget(env))
 }
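DefaultMutators now ends with DefineDefaultTarget, and DefaultMutatorsForTarget appends SelectTarget for an explicitly requested target. A hedged sketch of driving such a chain by hand, using only the Mutator interface visible in this diff; the CLI has its own apply helper, and the manual loop here is an assumption for illustration:

package main

import (
    "context"
    "log"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/bundle/config/mutator"
)

func main() {
    ctx := context.Background()

    b, err := bundle.Load(ctx, ".")
    if err != nil {
        log.Fatal(err)
    }

    // Process includes, define a "default" target if none exist, load git details,
    // then merge the overrides for the requested target into the root configuration.
    for _, m := range mutator.DefaultMutatorsForTarget("default") {
        if err := m.Apply(ctx, b); err != nil {
            log.Fatalf("%s: %v", m.Name(), err)
        }
    }
}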
@@ -35,7 +35,7 @@ func overrideJobCompute(j *resources.Job, compute string) {
 func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) error {
     if b.Config.Bundle.Mode != config.Development {
         if b.Config.Bundle.ComputeID != "" {
-            return fmt.Errorf("cannot override compute for an environment that does not use 'mode: development'")
+            return fmt.Errorf("cannot override compute for an target that does not use 'mode: development'")
         }
         return nil
     }

@@ -5,11 +5,11 @@ import (
     "fmt"
     "os"
     "path/filepath"
+    "slices"
     "strings"
 
     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config"
-    "golang.org/x/exp/slices"
 )
 
 // Get extra include paths from environment variable
@@ -13,16 +13,16 @@ import (
     "github.com/databricks/databricks-sdk-go/service/ml"
 )
 
-type processEnvironmentMode struct{}
+type processTargetMode struct{}
 
 const developmentConcurrentRuns = 4
 
-func ProcessEnvironmentMode() bundle.Mutator {
-    return &processEnvironmentMode{}
+func ProcessTargetMode() bundle.Mutator {
+    return &processTargetMode{}
 }
 
-func (m *processEnvironmentMode) Name() string {
-    return "ProcessEnvironmentMode"
+func (m *processTargetMode) Name() string {
+    return "ProcessTargetMode"
 }
 
 // Mark all resources as being for 'development' purposes, i.e.
@@ -110,14 +110,14 @@ func findIncorrectPath(b *bundle.Bundle, mode config.Mode) string {
 
 func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error {
     if b.Config.Bundle.Git.Inferred {
-        env := b.Config.Bundle.Environment
-        return fmt.Errorf("environment with 'mode: production' must specify an explicit 'environments.%s.git' configuration", env)
+        env := b.Config.Bundle.Target
+        return fmt.Errorf("target with 'mode: production' must specify an explicit 'targets.%s.git' configuration", env)
     }
 
     r := b.Config.Resources
     for i := range r.Pipelines {
         if r.Pipelines[i].Development {
-            return fmt.Errorf("environment with 'mode: production' cannot specify a pipeline with 'development: true'")
+            return fmt.Errorf("target with 'mode: production' cannot specify a pipeline with 'development: true'")
         }
     }
 
@@ -125,7 +125,7 @@ func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUs
     if path := findIncorrectPath(b, config.Production); path != "" {
         message := "%s must not contain the current username when using 'mode: production'"
         if path == "root_path" {
-            return fmt.Errorf(message+"\n tip: set workspace.root_path to a shared path such as /Shared/.bundle/${bundle.name}/${bundle.environment}", path)
+            return fmt.Errorf(message+"\n tip: set workspace.root_path to a shared path such as /Shared/.bundle/${bundle.name}/${bundle.target}", path)
         } else {
             return fmt.Errorf(message, path)
         }
@@ -165,7 +165,7 @@ func isRunAsSet(r config.Resources) bool {
     return true
 }
 
-func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) error {
+func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error {
     switch b.Config.Bundle.Mode {
     case config.Development:
         err := validateDevelopmentMode(b)
@@ -58,10 +58,10 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
     }
 }
 
-func TestProcessEnvironmentModeDevelopment(t *testing.T) {
+func TestProcessTargetModeDevelopment(t *testing.T) {
     bundle := mockBundle(config.Development)
 
-    m := ProcessEnvironmentMode()
+    m := ProcessTargetMode()
     err := m.Apply(context.Background(), bundle)
     require.NoError(t, err)
     assert.Equal(t, "[dev lennart] job1", bundle.Config.Resources.Jobs["job1"].Name)
@@ -73,10 +73,10 @@ func TestProcessEnvironmentModeDevelopment(t *testing.T) {
     assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
 }
 
-func TestProcessEnvironmentModeDefault(t *testing.T) {
+func TestProcessTargetModeDefault(t *testing.T) {
     bundle := mockBundle("")
 
-    m := ProcessEnvironmentMode()
+    m := ProcessTargetMode()
     err := m.Apply(context.Background(), bundle)
     require.NoError(t, err)
     assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name)
@@ -84,7 +84,7 @@ func TestProcessEnvironmentModeDefault(t *testing.T) {
     assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
 }
 
-func TestProcessEnvironmentModeProduction(t *testing.T) {
+func TestProcessTargetModeProduction(t *testing.T) {
     bundle := mockBundle(config.Production)
 
     err := validateProductionMode(context.Background(), bundle, false)
@@ -118,7 +118,7 @@ func TestProcessEnvironmentModeProduction(t *testing.T) {
     assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
 }
 
-func TestProcessEnvironmentModeProductionGit(t *testing.T) {
+func TestProcessTargetModeProductionGit(t *testing.T) {
     bundle := mockBundle(config.Production)
 
     // Pretend the user didn't set Git configuration explicitly
@@ -129,10 +129,10 @@ func TestProcessEnvironmentModeProductionGit(t *testing.T) {
     bundle.Config.Bundle.Git.Inferred = false
 }
 
-func TestProcessEnvironmentModeProductionOkForPrincipal(t *testing.T) {
+func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) {
     bundle := mockBundle(config.Production)
 
-    // Our environment has all kinds of problems when not using service principals ...
+    // Our target has all kinds of problems when not using service principals ...
     err := validateProductionMode(context.Background(), bundle, false)
     require.Error(t, err)
 
@@ -152,7 +152,7 @@ func TestAllResourcesMocked(t *testing.T) {
         assert.True(
             t,
             !field.IsNil() && field.Len() > 0,
-            "process_environment_mode should support '%s' (please add it to process_environment_mode.go and extend the test suite)",
+            "process_target_mode should support '%s' (please add it to process_target_mode.go and extend the test suite)",
             resources.Type().Field(i).Name,
         )
     }
@@ -164,7 +164,7 @@ func TestAllResourcesRenamed(t *testing.T) {
     bundle := mockBundle(config.Development)
     resources := reflect.ValueOf(bundle.Config.Resources)
 
-    m := ProcessEnvironmentMode()
+    m := ProcessTargetMode()
     err := m.Apply(context.Background(), bundle)
     require.NoError(t, err)
 
@@ -179,7 +179,7 @@ func TestAllResourcesRenamed(t *testing.T) {
         assert.True(
             t,
             strings.Contains(nameField.String(), "dev"),
-            "process_environment_mode should rename '%s' in '%s'",
+            "process_target_mode should rename '%s' in '%s'",
             key,
             resources.Type().Field(i).Name,
         )
@@ -1,54 +0,0 @@
-package mutator
-
-import (
-    "context"
-    "fmt"
-    "strings"
-
-    "github.com/databricks/cli/bundle"
-    "golang.org/x/exp/maps"
-)
-
-type selectDefaultEnvironment struct{}
-
-// SelectDefaultEnvironment merges the default environment into the root configuration.
-func SelectDefaultEnvironment() bundle.Mutator {
-    return &selectDefaultEnvironment{}
-}
-
-func (m *selectDefaultEnvironment) Name() string {
-    return "SelectDefaultEnvironment"
-}
-
-func (m *selectDefaultEnvironment) Apply(ctx context.Context, b *bundle.Bundle) error {
-    if len(b.Config.Environments) == 0 {
-        return fmt.Errorf("no environments defined")
-    }
-
-    // One environment means there's only one default.
-    names := maps.Keys(b.Config.Environments)
-    if len(names) == 1 {
-        return SelectEnvironment(names[0]).Apply(ctx, b)
-    }
-
-    // Multiple environments means we look for the `default` flag.
-    var defaults []string
-    for name, env := range b.Config.Environments {
-        if env != nil && env.Default {
-            defaults = append(defaults, name)
-        }
-    }
-
-    // It is invalid to have multiple environments with the `default` flag set.
-    if len(defaults) > 1 {
-        return fmt.Errorf("multiple environments are marked as default (%s)", strings.Join(defaults, ", "))
-    }
-
-    // If no environment has the `default` flag set, ask the user to specify one.
-    if len(defaults) == 0 {
-        return fmt.Errorf("please specify environment")
-    }
-
-    // One default remaining.
-    return SelectEnvironment(defaults[0]).Apply(ctx, b)
-}

@@ -1,90 +0,0 @@
-package mutator_test
-
-import (
-    "context"
-    "testing"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/bundle/config/mutator"
-    "github.com/stretchr/testify/assert"
-)
-
-func TestSelectDefaultEnvironmentNoEnvironments(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{},
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.ErrorContains(t, err, "no environments defined")
-}
-
-func TestSelectDefaultEnvironmentSingleEnvironments(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{
-                "foo": {},
-            },
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.NoError(t, err)
-    assert.Equal(t, "foo", bundle.Config.Bundle.Environment)
-}
-
-func TestSelectDefaultEnvironmentNoDefaults(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{
-                "foo": {},
-                "bar": {},
-                "qux": {},
-            },
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.ErrorContains(t, err, "please specify environment")
-}
-
-func TestSelectDefaultEnvironmentNoDefaultsWithNil(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{
-                "foo": nil,
-                "bar": nil,
-            },
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.ErrorContains(t, err, "please specify environment")
-}
-
-func TestSelectDefaultEnvironmentMultipleDefaults(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{
-                "foo": {Default: true},
-                "bar": {Default: true},
-                "qux": {Default: true},
-            },
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.ErrorContains(t, err, "multiple environments are marked as default")
-}
-
-func TestSelectDefaultEnvironmentSingleDefault(t *testing.T) {
-    bundle := &bundle.Bundle{
-        Config: config.Root{
-            Environments: map[string]*config.Environment{
-                "foo": {},
-                "bar": {Default: true},
-                "qux": {},
-            },
-        },
-    }
-    err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle)
-    assert.NoError(t, err)
-    assert.Equal(t, "bar", bundle.Config.Bundle.Environment)
-}
@@ -0,0 +1,54 @@
+package mutator
+
+import (
+    "context"
+    "fmt"
+    "strings"
+
+    "github.com/databricks/cli/bundle"
+    "golang.org/x/exp/maps"
+)
+
+type selectDefaultTarget struct{}
+
+// SelectDefaultTarget merges the default target into the root configuration.
+func SelectDefaultTarget() bundle.Mutator {
+    return &selectDefaultTarget{}
+}
+
+func (m *selectDefaultTarget) Name() string {
+    return "SelectDefaultTarget"
+}
+
+func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error {
+    if len(b.Config.Targets) == 0 {
+        return fmt.Errorf("no targets defined")
+    }
+
+    // One target means there's only one default.
+    names := maps.Keys(b.Config.Targets)
+    if len(names) == 1 {
+        return SelectTarget(names[0]).Apply(ctx, b)
+    }
+
+    // Multiple targets means we look for the `default` flag.
+    var defaults []string
+    for name, env := range b.Config.Targets {
+        if env != nil && env.Default {
+            defaults = append(defaults, name)
+        }
+    }
+
+    // It is invalid to have multiple targets with the `default` flag set.
+    if len(defaults) > 1 {
+        return fmt.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", "))
+    }
+
+    // If no target has the `default` flag set, ask the user to specify one.
+    if len(defaults) == 0 {
+        return fmt.Errorf("please specify target")
+    }
+
+    // One default remaining.
+    return SelectTarget(defaults[0]).Apply(ctx, b)
+}

@@ -0,0 +1,90 @@
+package mutator_test
+
+import (
+    "context"
+    "testing"
+
+    "github.com/databricks/cli/bundle"
+    "github.com/databricks/cli/bundle/config"
+    "github.com/databricks/cli/bundle/config/mutator"
+    "github.com/stretchr/testify/assert"
+)
+
+func TestSelectDefaultTargetNoTargets(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{},
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.ErrorContains(t, err, "no targets defined")
+}
+
+func TestSelectDefaultTargetSingleTargets(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{
+                "foo": {},
+            },
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.NoError(t, err)
+    assert.Equal(t, "foo", bundle.Config.Bundle.Target)
+}
+
+func TestSelectDefaultTargetNoDefaults(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{
+                "foo": {},
+                "bar": {},
+                "qux": {},
+            },
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.ErrorContains(t, err, "please specify target")
+}
+
+func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{
+                "foo": nil,
+                "bar": nil,
+            },
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.ErrorContains(t, err, "please specify target")
+}
+
+func TestSelectDefaultTargetMultipleDefaults(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{
+                "foo": {Default: true},
+                "bar": {Default: true},
+                "qux": {Default: true},
+            },
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.ErrorContains(t, err, "multiple targets are marked as default")
+}
+
+func TestSelectDefaultTargetSingleDefault(t *testing.T) {
+    bundle := &bundle.Bundle{
+        Config: config.Root{
+            Targets: map[string]*config.Target{
+                "foo": {},
+                "bar": {Default: true},
+                "qux": {},
+            },
+        },
+    }
+    err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle)
+    assert.NoError(t, err)
+    assert.Equal(t, "bar", bundle.Config.Bundle.Target)
+}
@@ -1,48 +0,0 @@
-package mutator
-
-import (
-    "context"
-    "fmt"
-
-    "github.com/databricks/cli/bundle"
-)
-
-type selectEnvironment struct {
-    name string
-}
-
-// SelectEnvironment merges the specified environment into the root configuration.
-func SelectEnvironment(name string) bundle.Mutator {
-    return &selectEnvironment{
-        name: name,
-    }
-}
-
-func (m *selectEnvironment) Name() string {
-    return fmt.Sprintf("SelectEnvironment(%s)", m.name)
-}
-
-func (m *selectEnvironment) Apply(_ context.Context, b *bundle.Bundle) error {
-    if b.Config.Environments == nil {
-        return fmt.Errorf("no environments defined")
-    }
-
-    // Get specified environment
-    env, ok := b.Config.Environments[m.name]
-    if !ok {
-        return fmt.Errorf("%s: no such environment", m.name)
-    }
-
-    // Merge specified environment into root configuration structure.
-    err := b.Config.MergeEnvironment(env)
-    if err != nil {
-        return err
-    }
-
-    // Store specified environment in configuration for reference.
-    b.Config.Bundle.Environment = m.name
-
-    // Clear environments after loading.
-    b.Config.Environments = nil
-    return nil
-}

@@ -0,0 +1,54 @@
+package mutator
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/databricks/cli/bundle"
+)
+
+type selectTarget struct {
+    name string
+}
+
+// SelectTarget merges the specified target into the root configuration.
+func SelectTarget(name string) bundle.Mutator {
+    return &selectTarget{
+        name: name,
+    }
+}
+
+func (m *selectTarget) Name() string {
+    return fmt.Sprintf("SelectTarget(%s)", m.name)
+}
+
+func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error {
+    if b.Config.Targets == nil {
+        return fmt.Errorf("no targets defined")
+    }
+
+    // Get specified target
+    target, ok := b.Config.Targets[m.name]
+    if !ok {
+        return fmt.Errorf("%s: no such target", m.name)
+    }
+
+    // Merge specified target into root configuration structure.
+    err := b.Config.MergeTargetOverrides(target)
+    if err != nil {
+        return err
+    }
+
+    // Store specified target in configuration for reference.
+    b.Config.Bundle.Target = m.name
+
+    // We do this for backward compatibility.
+    // TODO: remove when Environments section is not supported anymore.
+    b.Config.Bundle.Environment = b.Config.Bundle.Target
+
+    // Clear targets after loading.
+    b.Config.Targets = nil
+    b.Config.Environments = nil
+
+    return nil
+}
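SelectTarget replaces SelectEnvironment: it merges the chosen target's overrides into the root configuration, records the name in both Bundle.Target and the deprecated Bundle.Environment, and clears the targets map. A small sketch of the effect on a hand-built configuration; the host names are invented for illustration:

package main

import (
    "context"
    "fmt"
    "log"

    "github.com/databricks/cli/bundle"
    "github.com/databricks/cli/bundle/config"
    "github.com/databricks/cli/bundle/config/mutator"
)

func main() {
    b := &bundle.Bundle{
        Config: config.Root{
            Workspace: config.Workspace{Host: "https://base.example.com"},
            Targets: map[string]*config.Target{
                "dev": {Workspace: &config.Workspace{Host: "https://dev.example.com"}},
            },
        },
    }

    if err := mutator.SelectTarget("dev").Apply(context.Background(), b); err != nil {
        log.Fatal(err)
    }

    // The target override wins and the selected name is recorded on the bundle.
    fmt.Println(b.Config.Workspace.Host, b.Config.Bundle.Target) // https://dev.example.com dev
}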
@@ -11,13 +11,13 @@ import (
     "github.com/stretchr/testify/require"
 )
 
-func TestSelectEnvironment(t *testing.T) {
+func TestSelectTarget(t *testing.T) {
     bundle := &bundle.Bundle{
         Config: config.Root{
             Workspace: config.Workspace{
                 Host: "foo",
             },
-            Environments: map[string]*config.Environment{
+            Targets: map[string]*config.Target{
                 "default": {
                     Workspace: &config.Workspace{
                         Host: "bar",
@@ -26,19 +26,19 @@ func TestSelectEnvironment(t *testing.T) {
             },
         },
     }
-    err := mutator.SelectEnvironment("default").Apply(context.Background(), bundle)
+    err := mutator.SelectTarget("default").Apply(context.Background(), bundle)
     require.NoError(t, err)
     assert.Equal(t, "bar", bundle.Config.Workspace.Host)
 }
 
-func TestSelectEnvironmentNotFound(t *testing.T) {
+func TestSelectTargetNotFound(t *testing.T) {
     bundle := &bundle.Bundle{
         Config: config.Root{
-            Environments: map[string]*config.Environment{
+            Targets: map[string]*config.Target{
                 "default": {},
             },
         },
     }
-    err := mutator.SelectEnvironment("doesnt-exist").Apply(context.Background(), bundle)
-    require.Error(t, err, "no environments defined")
+    err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), bundle)
+    require.Error(t, err, "no targets defined")
 }

@@ -113,3 +113,14 @@ func (r *Resources) SetConfigFilePath(path string) {
         e.ConfigFilePath = path
     }
 }
+
+// MergeJobClusters iterates over all jobs and merges their job clusters.
+// This is called after applying the target overrides.
+func (r *Resources) MergeJobClusters() error {
+    for _, job := range r.Jobs {
+        if err := job.MergeJobClusters(); err != nil {
+            return err
+        }
+    }
+    return nil
+}
@@ -1,6 +1,9 @@
 package resources
 
-import "github.com/databricks/databricks-sdk-go/service/jobs"
+import (
+    "github.com/databricks/databricks-sdk-go/service/jobs"
+    "github.com/imdario/mergo"
+)
 
 type Job struct {
     ID string `json:"id,omitempty" bundle:"readonly"`
@@ -10,3 +13,36 @@ type Job struct {
 
     *jobs.JobSettings
 }
+
+// MergeJobClusters merges job clusters with the same key.
+// The job clusters field is a slice, and as such, overrides are appended to it.
+// We can identify a job cluster by its key, however, so we can use this key
+// to figure out which definitions are actually overrides and merge them.
+func (j *Job) MergeJobClusters() error {
+    keys := make(map[string]*jobs.JobCluster)
+    output := make([]jobs.JobCluster, 0, len(j.JobClusters))
+
+    // Target overrides are always appended, so we can iterate in natural order to
+    // first find the base definition, and merge instances we encounter later.
+    for i := range j.JobClusters {
+        key := j.JobClusters[i].JobClusterKey
+
+        // Register job cluster with key if not yet seen before.
+        ref, ok := keys[key]
+        if !ok {
+            output = append(output, j.JobClusters[i])
+            keys[key] = &j.JobClusters[i]
+            continue
+        }
+
+        // Merge this instance into the reference.
+        err := mergo.Merge(ref, &j.JobClusters[i], mergo.WithOverride, mergo.WithAppendSlice)
+        if err != nil {
+            return err
+        }
+    }
+
+    // Overwrite resulting slice.
+    j.JobClusters = output
+    return nil
+}
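MergeJobClusters relies on mergo.Merge with WithOverride and WithAppendSlice to fold a later cluster definition with the same key into the first one. A standalone sketch of what those two options do on a toy struct, mirroring the behavior the test below asserts; the struct and values here are invented:

package main

import (
    "fmt"

    "github.com/imdario/mergo"
)

type clusterSpec struct {
    SparkVersion string
    NodeTypeID   string
    NumWorkers   int
    Tags         []string
}

func main() {
    base := clusterSpec{SparkVersion: "13.3.x-scala2.12", NodeTypeID: "i3.xlarge", NumWorkers: 2, Tags: []string{"team:data"}}
    override := clusterSpec{NodeTypeID: "i3.2xlarge", NumWorkers: 4, Tags: []string{"env:dev"}}

    // WithOverride lets non-zero fields of the override replace the base values;
    // WithAppendSlice appends slice elements instead of replacing the slice.
    if err := mergo.Merge(&base, &override, mergo.WithOverride, mergo.WithAppendSlice); err != nil {
        panic(err)
    }

    // Zero-value fields in the override (here SparkVersion) leave the base untouched.
    fmt.Printf("%+v\n", base)
}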
@@ -0,0 +1,57 @@
+package resources
+
+import (
+    "testing"
+
+    "github.com/databricks/databricks-sdk-go/service/compute"
+    "github.com/databricks/databricks-sdk-go/service/jobs"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+)
+
+func TestJobMergeJobClusters(t *testing.T) {
+    j := &Job{
+        JobSettings: &jobs.JobSettings{
+            JobClusters: []jobs.JobCluster{
+                {
+                    JobClusterKey: "foo",
+                    NewCluster: &compute.ClusterSpec{
+                        SparkVersion: "13.3.x-scala2.12",
+                        NodeTypeId:   "i3.xlarge",
+                        NumWorkers:   2,
+                    },
+                },
+                {
+                    JobClusterKey: "bar",
+                    NewCluster: &compute.ClusterSpec{
+                        SparkVersion: "10.4.x-scala2.12",
+                    },
+                },
+                {
+                    JobClusterKey: "foo",
+                    NewCluster: &compute.ClusterSpec{
+                        NodeTypeId: "i3.2xlarge",
+                        NumWorkers: 4,
+                    },
+                },
+            },
+        },
+    }
+
+    err := j.MergeJobClusters()
+    require.NoError(t, err)
+
+    assert.Len(t, j.JobClusters, 2)
+    assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey)
+    assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey)
+
+    // This job cluster was merged with a subsequent one.
+    jc0 := j.JobClusters[0].NewCluster
+    assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion)
+    assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId)
+    assert.Equal(t, 4, jc0.NumWorkers)
+
+    // This job cluster was left untouched.
+    jc1 := j.JobClusters[1].NewCluster
+    assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion)
+}
@@ -69,11 +69,17 @@ type Root struct {
     // to deploy in this bundle (e.g. jobs, pipelines, etc.).
     Resources Resources `json:"resources,omitempty"`
 
-    // Environments can be used to differentiate settings and resources between
-    // bundle deployment environments (e.g. development, staging, production).
+    // Targets can be used to differentiate settings and resources between
+    // bundle deployment targets (e.g. development, staging, production).
     // If not specified, the code below initializes this field with a
-    // single default-initialized environment called "default".
-    Environments map[string]*Environment `json:"environments,omitempty"`
+    // single default-initialized target called "default".
+    Targets map[string]*Target `json:"targets,omitempty"`
+
+    // DEPRECATED. Left for backward compatibility with Targets
+    Environments map[string]*Target `json:"environments,omitempty"`
+
+    // Sync section specifies options for files synchronization
+    Sync Sync `json:"sync"`
 }
 
 func Load(path string) (*Root, error) {
@@ -103,8 +109,8 @@ func Load(path string) (*Root, error) {
 // was loaded from in configuration leafs that require it.
 func (r *Root) SetConfigFilePath(path string) {
     r.Resources.SetConfigFilePath(path)
-    if r.Environments != nil {
-        for _, env := range r.Environments {
+    if r.Targets != nil {
+        for _, env := range r.Targets {
             if env == nil {
                 continue
             }
@@ -148,6 +154,15 @@ func (r *Root) Load(path string) error {
         return fmt.Errorf("failed to load %s: %w", path, err)
     }
 
+    if r.Environments != nil && r.Targets != nil {
+        return fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path)
+    }
+
+    if r.Environments != nil {
+        //TODO: add a command line notice that this is a deprecated option.
+        r.Targets = r.Environments
+    }
+
     r.Path = filepath.Dir(path)
     r.SetConfigFilePath(path)
 
@@ -166,47 +181,52 @@ func (r *Root) Merge(other *Root) error {
     }
 
     // TODO: define and test semantics for merging.
-    return mergo.MergeWithOverwrite(r, other)
+    return mergo.Merge(r, other, mergo.WithOverride)
 }
 
-func (r *Root) MergeEnvironment(env *Environment) error {
+func (r *Root) MergeTargetOverrides(target *Target) error {
     var err error
 
-    // Environment may be nil if it's empty.
-    if env == nil {
+    // Target may be nil if it's empty.
+    if target == nil {
         return nil
     }
 
-    if env.Bundle != nil {
-        err = mergo.MergeWithOverwrite(&r.Bundle, env.Bundle)
+    if target.Bundle != nil {
+        err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride)
         if err != nil {
             return err
         }
     }
 
-    if env.Workspace != nil {
-        err = mergo.MergeWithOverwrite(&r.Workspace, env.Workspace)
+    if target.Workspace != nil {
+        err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride)
        if err != nil {
             return err
         }
     }
 
-    if env.Artifacts != nil {
-        err = mergo.Merge(&r.Artifacts, env.Artifacts, mergo.WithAppendSlice)
+    if target.Artifacts != nil {
+        err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice)
         if err != nil {
             return err
         }
     }
 
-    if env.Resources != nil {
-        err = mergo.Merge(&r.Resources, env.Resources, mergo.WithAppendSlice)
+    if target.Resources != nil {
+        err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice)
        if err != nil {
             return err
         }
+
+        err = r.Resources.MergeJobClusters()
+        if err != nil {
+            return err
+        }
     }
 
-    if env.Variables != nil {
-        for k, v := range env.Variables {
+    if target.Variables != nil {
+        for k, v := range target.Variables {
             variable, ok := r.Variables[k]
             if !ok {
                 return fmt.Errorf("variable %s is not defined but is assigned a value", k)
@@ -217,24 +237,24 @@ func (r *Root) MergeEnvironment(env *Environment) error {
         }
     }
 
-    if env.Mode != "" {
-        r.Bundle.Mode = env.Mode
+    if target.Mode != "" {
+        r.Bundle.Mode = target.Mode
     }
 
-    if env.ComputeID != "" {
-        r.Bundle.ComputeID = env.ComputeID
+    if target.ComputeID != "" {
+        r.Bundle.ComputeID = target.ComputeID
     }
 
     git := &r.Bundle.Git
-    if env.Git.Branch != "" {
-        git.Branch = env.Git.Branch
+    if target.Git.Branch != "" {
+        git.Branch = target.Git.Branch
         git.Inferred = false
     }
-    if env.Git.Commit != "" {
-        git.Commit = env.Git.Commit
+    if target.Git.Commit != "" {
+        git.Commit = target.Git.Commit
     }
-    if env.Git.OriginURL != "" {
-        git.OriginURL = env.Git.OriginURL
+    if target.Git.OriginURL != "" {
+        git.OriginURL = target.Git.OriginURL
     }
 
     return nil
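MergeTargetOverrides is the renamed MergeEnvironment: it folds a target's bundle, workspace, artifact, resource and variable overrides into the root configuration and now also collapses duplicate job clusters. A brief sketch of calling it directly on the config types shown in this diff; the field values are illustrative assumptions:

package main

import (
    "fmt"
    "log"

    "github.com/databricks/cli/bundle/config"
)

func main() {
    root := &config.Root{
        Bundle:    config.Bundle{Name: "my_bundle"},
        Workspace: config.Workspace{Host: "https://base.example.com"},
    }

    target := &config.Target{
        Mode:      config.Development,
        Workspace: &config.Workspace{Host: "https://dev.example.com"},
    }

    // Non-nil sections of the target override the corresponding root sections.
    if err := root.MergeTargetOverrides(target); err != nil {
        log.Fatal(err)
    }

    fmt.Println(root.Workspace.Host, root.Bundle.Mode) // https://dev.example.com development
}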
@@ -57,7 +57,7 @@ func TestRootMergeStruct(t *testing.T) {
 func TestRootMergeMap(t *testing.T) {
     root := &Root{
         Path: "path",
-        Environments: map[string]*Environment{
+        Targets: map[string]*Target{
             "development": {
                 Workspace: &Workspace{
                     Host: "foo",
@@ -68,7 +68,7 @@ func TestRootMergeMap(t *testing.T) {
     }
     other := &Root{
         Path: "path",
-        Environments: map[string]*Environment{
+        Targets: map[string]*Target{
             "development": {
                 Workspace: &Workspace{
                     Host: "bar",
@@ -77,7 +77,7 @@ func TestRootMergeMap(t *testing.T) {
         },
     }
     assert.NoError(t, root.Merge(other))
-    assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Environments["development"].Workspace)
+    assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Targets["development"].Workspace)
 }
 
 func TestDuplicateIdOnLoadReturnsError(t *testing.T) {
@@ -159,12 +159,12 @@ func TestInitializeVariablesUndefinedVariables(t *testing.T) {
     assert.ErrorContains(t, err, "variable bar has not been defined")
 }
 
-func TestRootMergeEnvironmentWithMode(t *testing.T) {
+func TestRootMergeTargetOverridesWithMode(t *testing.T) {
     root := &Root{
         Bundle: Bundle{},
     }
-    env := &Environment{Mode: Development}
-    require.NoError(t, root.MergeEnvironment(env))
+    env := &Target{Mode: Development}
+    require.NoError(t, root.MergeTargetOverrides(env))
     assert.Equal(t, Development, root.Bundle.Mode)
 }
 
@@ -0,0 +1,13 @@
+package config
+
+type Sync struct {
+    // Include contains a list of globs evaluated relative to the bundle root path
+    // to explicitly include files that were excluded by the user's gitignore.
+    Include []string `json:"include,omitempty"`
+
+    // Exclude contains a list of globs evaluated relative to the bundle root path
+    // to explicitly exclude files that were included by
+    // 1) the default that observes the user's gitignore, or
+    // 2) the `Include` field above.
+    Exclude []string `json:"exclude,omitempty"`
+}

@@ -2,14 +2,14 @@ package config
 
 type Mode string
 
-// Environment defines overrides for a single environment.
+// Target defines overrides for a single target.
 // This structure is recursively merged into the root configuration.
-type Environment struct {
-    // Default marks that this environment must be used if one isn't specified
-    // by the user (through environment variable or command line argument).
+type Target struct {
+    // Default marks that this target must be used if one isn't specified
+    // by the user (through target variable or command line argument).
     Default bool `json:"default,omitempty"`
 
-    // Determines the mode of the environment.
+    // Determines the mode of the target.
     // For example, 'mode: development' can be used for deployments for
     // development purposes.
     Mode Mode `json:"mode,omitempty"`
@@ -27,7 +27,7 @@ type Environment struct {
 
     // Override default values for defined variables
     // Does not permit defining new variables or redefining existing ones
-    // in the scope of an environment
+    // in the scope of an target
     Variables map[string]string `json:"variables,omitempty"`
 
     Git Git `json:"git,omitempty"`
@@ -18,7 +18,7 @@ type Variable struct {
// resolved in the following priority order (from highest to lowest)
//
// 1. Command line flag. For example: `--var="foo=bar"`
- // 2. Environment variable. eg: BUNDLE_VAR_foo=bar
+ // 2. Target variable. eg: BUNDLE_VAR_foo=bar
// 3. Default value as defined in the applicable environments block
// 4. Default value defined in variable definition
// 5. Throw error, since if no default value is defined, then the variable
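A compact sketch of that resolution order, with a hypothetical resolveVariable helper and simplified inputs (the real logic lives in the bundle mutators, not in a single function like this):

package main

import (
	"errors"
	"fmt"
	"os"
)

// resolveVariable illustrates the priority list above: --var flag first, then
// the BUNDLE_VAR_<name> process environment variable, then the selected
// target's variables block, then the variable definition's default, and
// otherwise an error.
func resolveVariable(name, flagValue, targetOverride string, defaultValue *string) (string, error) {
	if flagValue != "" {
		return flagValue, nil // 1. command line flag
	}
	if v, ok := os.LookupEnv("BUNDLE_VAR_" + name); ok {
		return v, nil // 2. process environment variable
	}
	if targetOverride != "" {
		return targetOverride, nil // 3. value from the target's variables block
	}
	if defaultValue != nil {
		return *defaultValue, nil // 4. default from the variable definition
	}
	return "", errors.New("no value assigned to required variable " + name) // 5. error
}

func main() {
	def := "default-a"
	v, err := resolveVariable("a", "", "dev-a", &def)
	fmt.Println(v, err) // dev-a <nil>
}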
@ -24,7 +24,7 @@ type Workspace struct {
|
|||
Host string `json:"host,omitempty"`
|
||||
Profile string `json:"profile,omitempty"`
|
||||
AuthType string `json:"auth_type,omitempty"`
|
||||
MetadataServiceURL string `json:"metadata_service_url,omitempty"`
|
||||
MetadataServiceURL string `json:"metadata_service_url,omitempty" bundle:"internal"`
|
||||
|
||||
// OAuth specific attributes.
|
||||
ClientID string `json:"client_id,omitempty"`
|
||||
|
@ -45,7 +45,7 @@ type Workspace struct {
|
|||
CurrentUser *User `json:"current_user,omitempty" bundle:"readonly"`
|
||||
|
||||
// Remote workspace base path for deployment state, for artifacts, as synchronization target.
|
||||
// This defaults to "~/.bundle/${bundle.name}/${bundle.environment}" where "~" expands to
|
||||
// This defaults to "~/.bundle/${bundle.name}/${bundle.target}" where "~" expands to
|
||||
// the current user's home directory in the workspace (e.g. `/Users/jane@doe.com`).
|
||||
RootPath string `json:"root_path,omitempty"`
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error {
|
|||
|
||||
red := color.New(color.FgRed).SprintFunc()
|
||||
if !b.AutoApprove {
|
||||
proceed, err := cmdio.Ask(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?: ", b.Config.Workspace.RootPath, red("deleted permanently!")))
|
||||
proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!")))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -14,9 +14,17 @@ func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) {
|
|||
return nil, fmt.Errorf("cannot get bundle cache directory: %w", err)
|
||||
}
|
||||
|
||||
includes, err := b.GetSyncIncludePatterns()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot get list of sync includes: %w", err)
|
||||
}
|
||||
|
||||
opts := sync.SyncOptions{
|
||||
LocalPath: b.Config.Path,
|
||||
RemotePath: b.Config.Workspace.FilesPath,
|
||||
Include: includes,
|
||||
Exclude: b.Config.Sync.Exclude,
|
||||
|
||||
Full: false,
|
||||
CurrentUser: b.Config.Workspace.CurrentUser.User,
|
||||
|
||||
|
|
|
@ -89,7 +89,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error {
|
|||
// Ask for confirmation, if needed
|
||||
if !b.Plan.ConfirmApply {
|
||||
red := color.New(color.FgRed).SprintFunc()
|
||||
b.Plan.ConfirmApply, err = cmdio.Ask(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed? [y/n]: ", red("destroy")))
|
||||
b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed?", red("destroy")))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
@ -57,7 +57,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con
|
|||
// Download Terraform to private bin directory.
|
||||
installer := &releases.LatestVersion{
|
||||
Product: product.Terraform,
|
||||
Constraints: version.MustConstraints(version.NewConstraint("<2.0")),
|
||||
Constraints: version.MustConstraints(version.NewConstraint("<=1.5.5")),
|
||||
InstallDir: binDir,
|
||||
}
|
||||
execPath, err = installer.Install(ctx)
|
||||
|
|
|
@ -31,7 +31,7 @@ func TestInitEnvironmentVariables(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
Terraform: &config.Terraform{
|
||||
ExecPath: "terraform",
|
||||
},
|
||||
|
@ -58,7 +58,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -86,7 +86,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -112,7 +112,7 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -142,7 +142,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -172,7 +172,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -202,7 +202,7 @@ func TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
@ -230,7 +230,7 @@ func TestSetProxyEnvVars(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
|
|
@ -20,7 +20,7 @@ func TestLoadWithNoState(t *testing.T) {
|
|||
Config: config.Root{
|
||||
Path: t.TempDir(),
|
||||
Bundle: config.Bundle{
|
||||
Environment: "whatever",
|
||||
Target: "whatever",
|
||||
Terraform: &config.Terraform{
|
||||
ExecPath: "terraform",
|
||||
},
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
package generator
|
||||
|
||||
import (
|
||||
"slices"
|
||||
|
||||
"golang.org/x/exp/maps"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
// sortKeys returns a sorted copy of the keys in the specified map.
|
||||
|
|
|
@ -4,10 +4,11 @@ import (
|
|||
"fmt"
|
||||
"strings"
|
||||
|
||||
"slices"
|
||||
|
||||
tfjson "github.com/hashicorp/terraform-json"
|
||||
"github.com/iancoleman/strcase"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
"golang.org/x/exp/slices"
|
||||
)
|
||||
|
||||
type field struct {
|
||||
|
|
|
@ -4,6 +4,7 @@ import (
|
|||
"context"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/databricks/cli/bundle"
|
||||
"github.com/databricks/cli/bundle/config"
|
||||
|
@@ -133,5 +134,20 @@ func libPath(library *compute.Library) string {
}

func isLocalLibrary(library *compute.Library) bool {
- return libPath(library) != ""
+ path := libPath(library)
+ if path == "" {
+ return false
+ }
+
+ return !isDbfsPath(path) && !isWorkspacePath(path)
}
+
+ func isDbfsPath(path string) bool {
+ return strings.HasPrefix(path, "dbfs:/")
+ }
+
+ func isWorkspacePath(path string) bool {
+ return strings.HasPrefix(path, "/Workspace/") ||
+ strings.HasPrefix(path, "/Users/") ||
+ strings.HasPrefix(path, "/Shared/")
+ }
@ -26,7 +26,7 @@ func Initialize() bundle.Mutator {
|
|||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
),
|
||||
mutator.OverrideCompute(),
|
||||
mutator.ProcessEnvironmentMode(),
|
||||
mutator.ProcessTargetMode(),
|
||||
mutator.TranslatePaths(),
|
||||
terraform.Initialize(),
|
||||
},
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package bundle
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
@ -108,7 +109,7 @@ func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) {
|
|||
chdir(t, filepath.Join(".", "tests", "basic"))
|
||||
t.Setenv(ExtraIncludePathsKey, "test")
|
||||
|
||||
bundle, err := MustLoad()
|
||||
bundle, err := MustLoad(context.Background())
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, "basic", bundle.Config.Bundle.Name)
|
||||
|
||||
|
@ -123,7 +124,7 @@ func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) {
|
|||
t.Setenv(envBundleRoot, dir)
|
||||
t.Setenv(ExtraIncludePathsKey, "test")
|
||||
|
||||
bundle, err := MustLoad()
|
||||
bundle, err := MustLoad(context.Background())
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t, dir, bundle.Config.Path)
|
||||
}
|
||||
|
@ -133,7 +134,7 @@ func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) {
|
|||
chdir(t, dir)
|
||||
t.Setenv(ExtraIncludePathsKey, "test")
|
||||
|
||||
_, err := MustLoad()
|
||||
_, err := MustLoad(context.Background())
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
||||
|
@ -142,6 +143,6 @@ func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) {
|
|||
chdir(t, dir)
|
||||
t.Setenv(envBundleRoot, dir)
|
||||
|
||||
_, err := MustLoad()
|
||||
_, err := MustLoad(context.Background())
|
||||
assert.Error(t, err)
|
||||
}
|
||||
|
|
|
@@ -3,7 +3,7 @@
`docs/bundle_descriptions.json` contains both autogenerated as well as manually written
descriptions for the json schema. Specifically
1. `resources` : almost all descriptions are autogenerated from the OpenAPI spec
- 2. `environments` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`)
+ 2. `targets` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`)
3. `bundle` : manually edited
4. `include` : manually edited
5. `workspace` : manually edited

@@ -17,7 +17,7 @@ These descriptions are rendered in the inline documentation in an IDE
`databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json`
2. Manually edit bundle_descriptions.json to add your descriptions
3. Build again to embed the new `bundle_descriptions.json` into the binary (`go build`)
- 4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments`
+ 4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `targets`
5. Push to repo
@ -52,20 +52,20 @@ func BundleDocs(openapiSpecPath string) (*Docs, error) {
|
|||
}
|
||||
docs.Properties["resources"] = schemaToDocs(resourceSchema)
|
||||
}
|
||||
docs.refreshEnvironmentsDocs()
|
||||
docs.refreshTargetsDocs()
|
||||
return docs, nil
|
||||
}
|
||||
|
||||
func (docs *Docs) refreshEnvironmentsDocs() error {
|
||||
environmentsDocs, ok := docs.Properties["environments"]
|
||||
if !ok || environmentsDocs.AdditionalProperties == nil ||
|
||||
environmentsDocs.AdditionalProperties.Properties == nil {
|
||||
return fmt.Errorf("invalid environments descriptions")
|
||||
func (docs *Docs) refreshTargetsDocs() error {
|
||||
targetsDocs, ok := docs.Properties["targets"]
|
||||
if !ok || targetsDocs.AdditionalProperties == nil ||
|
||||
targetsDocs.AdditionalProperties.Properties == nil {
|
||||
return fmt.Errorf("invalid targets descriptions")
|
||||
}
|
||||
environmentProperties := environmentsDocs.AdditionalProperties.Properties
|
||||
targetProperties := targetsDocs.AdditionalProperties.Properties
|
||||
propertiesToCopy := []string{"artifacts", "bundle", "resources", "workspace"}
|
||||
for _, p := range propertiesToCopy {
|
||||
environmentProperties[p] = docs.Properties[p]
|
||||
targetProperties[p] = docs.Properties[p]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
|
@ -36,7 +36,7 @@
|
|||
}
|
||||
}
|
||||
},
|
||||
"environments": {
|
||||
"targets": {
|
||||
"description": "",
|
||||
"additionalproperties": {
|
||||
"description": "",
|
||||
|
@ -1827,7 +1827,7 @@
|
|||
"description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg."
|
||||
},
|
||||
"root_path": {
|
||||
"description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`"
|
||||
"description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`"
|
||||
},
|
||||
"state_path": {
|
||||
"description": "The remote path to synchronize bundle state to. This defaults to `${workspace.root}/state`"
|
||||
|
@ -3591,7 +3591,7 @@
|
|||
"description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg."
|
||||
},
|
||||
"root_path": {
|
||||
"description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`"
|
||||
"description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`"
|
||||
},
|
||||
"state_path": {
|
||||
"description": "The remote path to synchronize bundle state to. This defaults to `${workspace.root}/state`"
|
||||
|
|
|
@ -9,6 +9,14 @@ import (
|
|||
"github.com/databricks/cli/libs/jsonschema"
|
||||
)
|
||||
|
||||
// Fields tagged "readonly" should not be emitted in the schema as they are
|
||||
// computed at runtime, and should not be assigned a value by the bundle author.
|
||||
const readonlyTag = "readonly"
|
||||
|
||||
// Annotation for internal bundle fields that should not be exposed to customers.
|
||||
// Fields can be tagged as "internal" to remove them from the generated schema.
|
||||
const internalTag = "internal"
|
||||
|
||||
// This function translates golang types into json schema. Here is the mapping
|
||||
// between json schema types and golang types
|
||||
//
|
||||
|
@ -197,7 +205,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschem
|
|||
required := []string{}
|
||||
for _, child := range children {
|
||||
bundleTag := child.Tag.Get("bundle")
|
||||
if bundleTag == "readonly" {
|
||||
if bundleTag == readonlyTag || bundleTag == internalTag {
|
||||
continue
|
||||
}
|
||||
|
||||
|
|
|
@ -1462,3 +1462,55 @@ func TestBundleReadOnlytag(t *testing.T) {
|
|||
t.Log("[DEBUG] expected: ", expected)
|
||||
assert.Equal(t, expected, string(jsonSchema))
|
||||
}
|
||||
|
||||
func TestBundleInternalTag(t *testing.T) {
|
||||
type Pokemon struct {
|
||||
Pikachu string `json:"pikachu" bundle:"internal"`
|
||||
Raichu string `json:"raichu"`
|
||||
}
|
||||
|
||||
type Foo struct {
|
||||
Pokemon *Pokemon `json:"pokemon"`
|
||||
Apple int `json:"apple"`
|
||||
Mango string `json:"mango" bundle:"internal"`
|
||||
}
|
||||
|
||||
elem := Foo{}
|
||||
|
||||
schema, err := New(reflect.TypeOf(elem), nil)
|
||||
assert.NoError(t, err)
|
||||
|
||||
jsonSchema, err := json.MarshalIndent(schema, " ", " ")
|
||||
assert.NoError(t, err)
|
||||
|
||||
expected :=
|
||||
`{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"apple": {
|
||||
"type": "number"
|
||||
},
|
||||
"pokemon": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"raichu": {
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"raichu"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false,
|
||||
"required": [
|
||||
"pokemon",
|
||||
"apple"
|
||||
]
|
||||
}`
|
||||
|
||||
t.Log("[DEBUG] actual: ", string(jsonSchema))
|
||||
t.Log("[DEBUG] expected: ", expected)
|
||||
assert.Equal(t, expected, string(jsonSchema))
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
bundle:
|
||||
name: autoload git config test
|
||||
|
||||
environments:
|
||||
targets:
|
||||
development:
|
||||
default: true
|
||||
|
||||
|
|
|
@ -17,3 +17,5 @@ resources:
|
|||
python_wheel_task:
|
||||
package_name: "my_test_code"
|
||||
entry_point: "run"
|
||||
libraries:
|
||||
- whl: ./my_test_code/dist/*.whl
|
||||
|
|
|
@ -0,0 +1,15 @@
|
|||
bundle:
|
||||
name: python-wheel
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
test_job:
|
||||
name: "[${bundle.environment}] My Wheel Job"
|
||||
tasks:
|
||||
- task_key: TestTask
|
||||
existing_cluster_id: "0717-132531-5opeqon1"
|
||||
python_wheel_task:
|
||||
package_name: "my_test_code"
|
||||
entry_point: "run"
|
||||
libraries:
|
||||
- whl: dbfs://path/to/dist/mywheel.whl
|
|
@ -11,3 +11,5 @@ resources:
|
|||
python_wheel_task:
|
||||
package_name: "my_test_code"
|
||||
entry_point: "run"
|
||||
libraries:
|
||||
- whl: ./dist/*.whl
|
||||
|
|
|
@ -6,32 +6,57 @@ import (
|
|||
"testing"
|
||||
|
||||
"github.com/databricks/cli/bundle"
|
||||
"github.com/databricks/cli/bundle/libraries"
|
||||
"github.com/databricks/cli/bundle/phases"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestBundlePythonWheelBuild(t *testing.T) {
|
||||
b, err := bundle.Load("./python_wheel")
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./python_wheel")
|
||||
require.NoError(t, err)
|
||||
|
||||
m := phases.Build()
|
||||
err = m.Apply(context.Background(), b)
|
||||
err = m.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
|
||||
matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
|
||||
match := libraries.MatchWithArtifacts()
|
||||
err = match.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestBundlePythonWheelBuildAutoDetect(t *testing.T) {
|
||||
b, err := bundle.Load("./python_wheel_no_artifact")
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./python_wheel_no_artifact")
|
||||
require.NoError(t, err)
|
||||
|
||||
m := phases.Build()
|
||||
err = m.Apply(context.Background(), b)
|
||||
err = m.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
|
||||
matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(matches))
|
||||
|
||||
match := libraries.MatchWithArtifacts()
|
||||
err = match.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestBundlePythonWheelWithDBFSLib(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./python_wheel_dbfs_lib")
|
||||
require.NoError(t, err)
|
||||
|
||||
m := phases.Build()
|
||||
err = m.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
|
||||
match := libraries.MatchWithArtifacts()
|
||||
err = match.Apply(ctx, b)
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
|
|
@ -13,24 +13,27 @@ import (
|
|||
)
|
||||
|
||||
func TestConflictingResourceIdsNoSubconfig(t *testing.T) {
|
||||
_, err := bundle.Load("./conflicting_resource_ids/no_subconfigurations")
|
||||
ctx := context.Background()
|
||||
_, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations")
|
||||
bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml")
|
||||
assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath))
|
||||
}
|
||||
|
||||
func TestConflictingResourceIdsOneSubconfig(t *testing.T) {
|
||||
b, err := bundle.Load("./conflicting_resource_ids/one_subconfiguration")
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration")
|
||||
require.NoError(t, err)
|
||||
err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml")
|
||||
resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml")
|
||||
assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath))
|
||||
}
|
||||
|
||||
func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) {
|
||||
b, err := bundle.Load("./conflicting_resource_ids/two_subconfigurations")
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations")
|
||||
require.NoError(t, err)
|
||||
err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml")
|
||||
resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml")
|
||||
assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath))
|
||||
|
|
|
@ -1,5 +0,0 @@
|
|||
bundle:
|
||||
name: environment_empty
|
||||
|
||||
environments:
|
||||
development:
|
|
@ -1,12 +0,0 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestEnvironmentEmpty(t *testing.T) {
|
||||
b := loadEnvironment(t, "./environment_empty", "development")
|
||||
assert.Equal(t, "development", b.Config.Bundle.Environment)
|
||||
}
|
|
@ -6,14 +6,14 @@ import (
|
|||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestAutoLoad(t *testing.T) {
|
||||
b := load(t, "./autoload_git")
|
||||
func TestGitAutoLoadWithEnvironment(t *testing.T) {
|
||||
b := load(t, "./environments_autoload_git")
|
||||
assert.True(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli")
|
||||
}
|
||||
|
||||
func TestManuallySetBranch(t *testing.T) {
|
||||
b := loadEnvironment(t, "./autoload_git", "production")
|
||||
func TestGitManuallySetBranchWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_autoload_git", "production")
|
||||
assert.False(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
|
||||
assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli")
|
|
@ -0,0 +1,36 @@
|
|||
bundle:
|
||||
name: environment_overrides
|
||||
|
||||
workspace:
|
||||
host: https://acme.cloud.databricks.com/
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
job1:
|
||||
name: "base job"
|
||||
|
||||
pipelines:
|
||||
boolean1:
|
||||
photon: true
|
||||
|
||||
boolean2:
|
||||
photon: false
|
||||
|
||||
environments:
|
||||
development:
|
||||
default: true
|
||||
|
||||
staging:
|
||||
resources:
|
||||
jobs:
|
||||
job1:
|
||||
name: "staging job"
|
||||
|
||||
pipelines:
|
||||
boolean1:
|
||||
# Note: setting a property to a zero value (in Go) does not have effect.
|
||||
# See the corresponding test for details.
|
||||
photon: false
|
||||
|
||||
boolean2:
|
||||
photon: true
|
|
@@ -6,12 +6,33 @@ import (
"github.com/stretchr/testify/assert"
)

- func TestEnvironmentOverridesDev(t *testing.T) {
- b := loadEnvironment(t, "./environment_overrides", "development")
+ func TestEnvironmentOverridesWorkspaceDev(t *testing.T) {
+ b := loadTarget(t, "./environment_overrides/workspace", "development")
assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host)
}

- func TestEnvironmentOverridesStaging(t *testing.T) {
- b := loadEnvironment(t, "./environment_overrides", "staging")
+ func TestEnvironmentOverridesWorkspaceStaging(t *testing.T) {
+ b := loadTarget(t, "./environment_overrides/workspace", "staging")
assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host)
}
+
+ func TestEnvironmentOverridesResourcesDev(t *testing.T) {
+ b := loadTarget(t, "./environment_overrides/resources", "development")
+ assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name)
+
+ // Base values are preserved in the development environment.
+ assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon)
+ assert.Equal(t, false, b.Config.Resources.Pipelines["boolean2"].Photon)
+ }
+
+ func TestEnvironmentOverridesResourcesStaging(t *testing.T) {
+ b := loadTarget(t, "./environment_overrides/resources", "staging")
+ assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name)
+
+ // Overrides are only applied if they are not zero-valued.
+ // This means that in its current form, we cannot override a true value with a false value.
+ // Note: this is not desirable and will be addressed by representing our configuration
+ // in a different structure (e.g. with cty), instead of Go structs.
+ assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon)
+ assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon)
+ }
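The zero-value caveat spelled out in the test comments above comes from merging Go structs: a false bool is indistinguishable from "not set". A minimal sketch of that failure mode (not the CLI's actual merge code, which operates on the whole configuration tree):

package main

import "fmt"

type pipeline struct {
	Photon bool
}

// mergeNonZero copies the override only when it is not the zero value.
// In a struct-based merge, `photon: false` in a target looks identical to
// "photon not set", so it cannot switch an inherited true back to false.
func mergeNonZero(base, override pipeline) pipeline {
	if override.Photon { // the only "non-zero" bool is true
		base.Photon = true
	}
	return base
}

func main() {
	base := pipeline{Photon: true}
	override := pipeline{Photon: false}      // intended override, silently ignored
	fmt.Println(mergeNonZero(base, override)) // {true}
}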
@ -0,0 +1,11 @@
|
|||
bundle:
|
||||
name: autoload git config test
|
||||
|
||||
environments:
|
||||
development:
|
||||
default: true
|
||||
|
||||
production:
|
||||
# production can only be deployed from the 'main' branch
|
||||
git:
|
||||
branch: main
|
|
@ -0,0 +1,44 @@
|
|||
resources:
|
||||
pipelines:
|
||||
nyc_taxi_pipeline:
|
||||
name: "nyc taxi loader"
|
||||
libraries:
|
||||
- notebook:
|
||||
path: ./dlt/nyc_taxi_loader
|
||||
|
||||
environments:
|
||||
development:
|
||||
mode: development
|
||||
resources:
|
||||
pipelines:
|
||||
nyc_taxi_pipeline:
|
||||
target: nyc_taxi_development
|
||||
development: true
|
||||
|
||||
staging:
|
||||
resources:
|
||||
pipelines:
|
||||
nyc_taxi_pipeline:
|
||||
target: nyc_taxi_staging
|
||||
development: false
|
||||
|
||||
production:
|
||||
mode: production
|
||||
resources:
|
||||
pipelines:
|
||||
nyc_taxi_pipeline:
|
||||
target: nyc_taxi_production
|
||||
development: false
|
||||
photon: true
|
||||
|
||||
jobs:
|
||||
pipeline_schedule:
|
||||
name: Daily refresh of production pipeline
|
||||
|
||||
schedule:
|
||||
quartz_cron_expression: 6 6 11 * * ?
|
||||
timezone_id: UTC
|
||||
|
||||
tasks:
|
||||
- pipeline_task:
|
||||
pipeline_id: "to be interpolated"
|
|
@ -0,0 +1,56 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/databricks/cli/bundle/config"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_job_and_pipeline", "development")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
|
||||
assert.Equal(t, b.Config.Bundle.Mode, config.Development)
|
||||
assert.True(t, p.Development)
|
||||
require.Len(t, p.Libraries, 1)
|
||||
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
|
||||
assert.Equal(t, "nyc_taxi_development", p.Target)
|
||||
}
|
||||
|
||||
func TestJobAndPipelineStagingWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_job_and_pipeline", "staging")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
|
||||
assert.False(t, p.Development)
|
||||
require.Len(t, p.Libraries, 1)
|
||||
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
|
||||
assert.Equal(t, "nyc_taxi_staging", p.Target)
|
||||
}
|
||||
|
||||
func TestJobAndPipelineProductionWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_job_and_pipeline", "production")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 1)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"]
|
||||
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath))
|
||||
assert.False(t, p.Development)
|
||||
require.Len(t, p.Libraries, 1)
|
||||
assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path)
|
||||
assert.Equal(t, "nyc_taxi_production", p.Target)
|
||||
|
||||
j := b.Config.Resources.Jobs["pipeline_schedule"]
|
||||
assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath))
|
||||
assert.Equal(t, "Daily refresh of production pipeline", j.Name)
|
||||
require.Len(t, j.Tasks, 1)
|
||||
assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId)
|
||||
}
|
|
@ -0,0 +1,35 @@
|
|||
bundle:
|
||||
name: override_job_cluster
|
||||
|
||||
workspace:
|
||||
host: https://acme.cloud.databricks.com/
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
name: job
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
spark_version: 13.3.x-scala2.12
|
||||
|
||||
environments:
|
||||
development:
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
node_type_id: i3.xlarge
|
||||
num_workers: 1
|
||||
|
||||
staging:
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
node_type_id: i3.2xlarge
|
||||
num_workers: 4
|
|
@ -0,0 +1,29 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestOverrideJobClusterDevWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_override_job_cluster", "development")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
|
||||
|
||||
c := b.Config.Resources.Jobs["foo"].JobClusters[0]
|
||||
assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion)
|
||||
assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 1, c.NewCluster.NumWorkers)
|
||||
}
|
||||
|
||||
func TestOverrideJobClusterStagingWithEnvironment(t *testing.T) {
|
||||
b := loadTarget(t, "./environments_override_job_cluster", "staging")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
|
||||
|
||||
c := b.Config.Resources.Jobs["foo"].JobClusters[0]
|
||||
assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion)
|
||||
assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 4, c.NewCluster.NumWorkers)
|
||||
}
|
|
@@ -0,0 +1 @@
+ ref: refs/heads/feature-b

@@ -0,0 +1,4 @@
+ bundle:
+   name: "Dancing Feet"
+   git:
+     branch: "feature-a"
@ -0,0 +1,39 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/databricks/cli/bundle"
|
||||
"github.com/databricks/cli/bundle/config/mutator"
|
||||
"github.com/databricks/cli/libs/git"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestGitAutoLoad(t *testing.T) {
|
||||
b := load(t, "./autoload_git")
|
||||
assert.True(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli")
|
||||
}
|
||||
|
||||
func TestGitManuallySetBranch(t *testing.T) {
|
||||
b := loadTarget(t, "./autoload_git", "production")
|
||||
assert.False(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Equal(t, "main", b.Config.Bundle.Git.Branch)
|
||||
assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli")
|
||||
}
|
||||
|
||||
func TestGitBundleBranchValidation(t *testing.T) {
|
||||
git.GitDirectoryName = ".mock-git"
|
||||
t.Cleanup(func() {
|
||||
git.GitDirectoryName = ".git"
|
||||
})
|
||||
|
||||
b := load(t, "./git_branch_validation")
|
||||
assert.False(t, b.Config.Bundle.Git.Inferred)
|
||||
assert.Equal(t, "feature-a", b.Config.Bundle.Git.Branch)
|
||||
assert.Equal(t, "feature-b", b.Config.Bundle.Git.ActualBranch)
|
||||
|
||||
err := bundle.Apply(context.Background(), b, mutator.ValidateGitDetails())
|
||||
assert.ErrorContains(t, err, "not on the right Git branch:")
|
||||
}
|
|
@ -14,9 +14,10 @@ import (
|
|||
)
|
||||
|
||||
func TestIncludeInvalid(t *testing.T) {
|
||||
b, err := bundle.Load("./include_invalid")
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, "./include_invalid")
|
||||
require.NoError(t, err)
|
||||
err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), "notexists.yml defined in 'include' section does not match any files")
|
||||
}
|
||||
|
|
|
@ -0,0 +1,14 @@
|
|||
bundle:
|
||||
name: foo ${workspace.profile}
|
||||
|
||||
workspace:
|
||||
profile: bar
|
||||
|
||||
targets:
|
||||
development:
|
||||
default: true
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
my_job:
|
||||
name: "${bundle.name} | ${workspace.profile} | ${bundle.environment} | ${bundle.target}"
|
|
@ -20,3 +20,15 @@ func TestInterpolation(t *testing.T) {
|
|||
assert.Equal(t, "foo bar", b.Config.Bundle.Name)
|
||||
assert.Equal(t, "foo bar | bar", b.Config.Resources.Jobs["my_job"].Name)
|
||||
}
|
||||
|
||||
func TestInterpolationWithTarget(t *testing.T) {
|
||||
b := loadTarget(t, "./interpolation_target", "development")
|
||||
err := bundle.Apply(context.Background(), b, interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath("bundle"),
|
||||
interpolation.IncludeLookupsInPath("workspace"),
|
||||
))
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "foo bar", b.Config.Bundle.Name)
|
||||
assert.Equal(t, "foo bar | bar | development | development", b.Config.Resources.Jobs["my_job"].Name)
|
||||
|
||||
}
|
||||
|
|
|
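In the fixture and test above, ${bundle.name}, ${workspace.profile}, ${bundle.environment} and ${bundle.target} are all resolved by the interpolation mutator; the test expects "development" for both of the last two. A rough sketch of the substitution idea, assuming a flat lookup map rather than the CLI's path-based, recursive lookup:

package main

import (
	"fmt"
	"regexp"
)

// ref matches ${path.to.value} style references.
var ref = regexp.MustCompile(`\$\{([a-zA-Z0-9._]+)\}`)

// interpolate is a simplified stand-in for the interpolation mutator: it
// replaces known references from a flat table and leaves unknown ones as-is.
func interpolate(s string, lookup map[string]string) string {
	return ref.ReplaceAllStringFunc(s, func(m string) string {
		key := ref.FindStringSubmatch(m)[1]
		if v, ok := lookup[key]; ok {
			return v
		}
		return m
	})
}

func main() {
	lookup := map[string]string{
		"bundle.name":        "foo bar",
		"workspace.profile":  "bar",
		"bundle.environment": "development",
		"bundle.target":      "development",
	}
	fmt.Println(interpolate("${bundle.name} | ${workspace.profile} | ${bundle.environment} | ${bundle.target}", lookup))
	// foo bar | bar | development | development
}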
@ -6,7 +6,7 @@ resources:
|
|||
- notebook:
|
||||
path: ./dlt/nyc_taxi_loader
|
||||
|
||||
environments:
|
||||
targets:
|
||||
development:
|
||||
mode: development
|
||||
resources:
|
||||
|
|
|
@ -10,7 +10,7 @@ import (
|
|||
)
|
||||
|
||||
func TestJobAndPipelineDevelopment(t *testing.T) {
|
||||
b := loadEnvironment(t, "./job_and_pipeline", "development")
|
||||
b := loadTarget(t, "./job_and_pipeline", "development")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
|
@ -24,7 +24,7 @@ func TestJobAndPipelineDevelopment(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestJobAndPipelineStaging(t *testing.T) {
|
||||
b := loadEnvironment(t, "./job_and_pipeline", "staging")
|
||||
b := loadTarget(t, "./job_and_pipeline", "staging")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 0)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
|
@ -37,7 +37,7 @@ func TestJobAndPipelineStaging(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestJobAndPipelineProduction(t *testing.T) {
|
||||
b := loadEnvironment(t, "./job_and_pipeline", "production")
|
||||
b := loadTarget(t, "./job_and_pipeline", "production")
|
||||
assert.Len(t, b.Config.Resources.Jobs, 1)
|
||||
assert.Len(t, b.Config.Resources.Pipelines, 1)
|
||||
|
||||
|
|
|
@ -10,16 +10,17 @@ import (
|
|||
)
|
||||
|
||||
func load(t *testing.T, path string) *bundle.Bundle {
|
||||
b, err := bundle.Load(path)
|
||||
ctx := context.Background()
|
||||
b, err := bundle.Load(ctx, path)
|
||||
require.NoError(t, err)
|
||||
err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...))
|
||||
require.NoError(t, err)
|
||||
return b
|
||||
}
|
||||
|
||||
func loadEnvironment(t *testing.T, path, env string) *bundle.Bundle {
|
||||
func loadTarget(t *testing.T, path, env string) *bundle.Bundle {
|
||||
b := load(t, path)
|
||||
err := bundle.Apply(context.Background(), b, mutator.SelectEnvironment(env))
|
||||
err := bundle.Apply(context.Background(), b, mutator.SelectTarget(env))
|
||||
require.NoError(t, err)
|
||||
return b
|
||||
}
|
||||
|
|
|
@ -0,0 +1,35 @@
|
|||
bundle:
|
||||
name: override_job_cluster
|
||||
|
||||
workspace:
|
||||
host: https://acme.cloud.databricks.com/
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
name: job
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
spark_version: 13.3.x-scala2.12
|
||||
|
||||
targets:
|
||||
development:
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
node_type_id: i3.xlarge
|
||||
num_workers: 1
|
||||
|
||||
staging:
|
||||
resources:
|
||||
jobs:
|
||||
foo:
|
||||
job_clusters:
|
||||
- job_cluster_key: key
|
||||
new_cluster:
|
||||
node_type_id: i3.2xlarge
|
||||
num_workers: 4
|
|
@ -0,0 +1,29 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestOverrideJobClusterDev(t *testing.T) {
|
||||
b := loadTarget(t, "./override_job_cluster", "development")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
|
||||
|
||||
c := b.Config.Resources.Jobs["foo"].JobClusters[0]
|
||||
assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion)
|
||||
assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 1, c.NewCluster.NumWorkers)
|
||||
}
|
||||
|
||||
func TestOverrideJobClusterStaging(t *testing.T) {
|
||||
b := loadTarget(t, "./override_job_cluster", "staging")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1)
|
||||
|
||||
c := b.Config.Resources.Jobs["foo"].JobClusters[0]
|
||||
assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion)
|
||||
assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId)
|
||||
assert.Equal(t, 4, c.NewCluster.NumWorkers)
|
||||
}
|
|
@ -0,0 +1,5 @@
|
|||
bundle:
|
||||
name: target_empty
|
||||
|
||||
targets:
|
||||
development:
|
|
@ -0,0 +1,12 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestTargetEmpty(t *testing.T) {
|
||||
b := loadTarget(t, "./target_empty", "development")
|
||||
assert.Equal(t, "development", b.Config.Bundle.Target)
|
||||
}
|
|
@ -0,0 +1,20 @@
|
|||
bundle:
|
||||
name: environment_overrides
|
||||
|
||||
workspace:
|
||||
host: https://acme.cloud.databricks.com/
|
||||
|
||||
resources:
|
||||
jobs:
|
||||
job1:
|
||||
name: "base job"
|
||||
|
||||
targets:
|
||||
development:
|
||||
default: true
|
||||
|
||||
staging:
|
||||
resources:
|
||||
jobs:
|
||||
job1:
|
||||
name: "staging job"
|
|
@ -0,0 +1,14 @@
|
|||
bundle:
|
||||
name: environment_overrides
|
||||
|
||||
workspace:
|
||||
host: https://acme.cloud.databricks.com/
|
||||
|
||||
targets:
|
||||
development:
|
||||
workspace:
|
||||
host: https://development.acme.cloud.databricks.com/
|
||||
|
||||
staging:
|
||||
workspace:
|
||||
host: https://staging.acme.cloud.databricks.com/
|
|
@ -0,0 +1,27 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestTargetOverridesWorkspaceDev(t *testing.T) {
|
||||
b := loadTarget(t, "./target_overrides/workspace", "development")
|
||||
assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host)
|
||||
}
|
||||
|
||||
func TestTargetOverridesWorkspaceStaging(t *testing.T) {
|
||||
b := loadTarget(t, "./target_overrides/workspace", "staging")
|
||||
assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host)
|
||||
}
|
||||
|
||||
func TestTargetOverridesResourcesDev(t *testing.T) {
|
||||
b := loadTarget(t, "./target_overrides/resources", "development")
|
||||
assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name)
|
||||
}
|
||||
|
||||
func TestTargetOverridesResourcesStaging(t *testing.T) {
|
||||
b := loadTarget(t, "./target_overrides/resources", "staging")
|
||||
assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name)
|
||||
}
|
|
@ -12,7 +12,7 @@ bundle:
|
|||
workspace:
|
||||
profile: ${var.a} ${var.b}
|
||||
|
||||
environments:
|
||||
targets:
|
||||
env-with-single-variable-override:
|
||||
variables:
|
||||
b: dev-b
|
||||
|
|
|
@ -34,10 +34,10 @@ func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) {
|
|||
assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
|
||||
}
|
||||
|
||||
func TestVariablesEnvironmentsBlockOverride(t *testing.T) {
|
||||
func TestVariablesTargetsBlockOverride(t *testing.T) {
|
||||
b := load(t, "./variables/env_overrides")
|
||||
err := bundle.Apply(context.Background(), b, bundle.Seq(
|
||||
mutator.SelectEnvironment("env-with-single-variable-override"),
|
||||
mutator.SelectTarget("env-with-single-variable-override"),
|
||||
mutator.SetVariables(),
|
||||
interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
|
@ -46,10 +46,10 @@ func TestVariablesEnvironmentsBlockOverride(t *testing.T) {
|
|||
assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile)
|
||||
}
|
||||
|
||||
func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) {
|
||||
func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) {
|
||||
b := load(t, "./variables/env_overrides")
|
||||
err := bundle.Apply(context.Background(), b, bundle.Seq(
|
||||
mutator.SelectEnvironment("env-with-two-variable-overrides"),
|
||||
mutator.SelectTarget("env-with-two-variable-overrides"),
|
||||
mutator.SetVariables(),
|
||||
interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
|
@ -58,11 +58,11 @@ func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) {
|
|||
assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile)
|
||||
}
|
||||
|
||||
func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) {
|
||||
func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) {
|
||||
t.Setenv("BUNDLE_VAR_b", "env-var-b")
|
||||
b := load(t, "./variables/env_overrides")
|
||||
err := bundle.Apply(context.Background(), b, bundle.Seq(
|
||||
mutator.SelectEnvironment("env-with-two-variable-overrides"),
|
||||
mutator.SelectTarget("env-with-two-variable-overrides"),
|
||||
mutator.SetVariables(),
|
||||
interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
|
@ -71,10 +71,10 @@ func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) {
|
|||
assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile)
|
||||
}
|
||||
|
||||
func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) {
|
||||
func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) {
|
||||
b := load(t, "./variables/env_overrides")
|
||||
err := bundle.Apply(context.Background(), b, bundle.Seq(
|
||||
mutator.SelectEnvironment("env-missing-a-required-variable-assignment"),
|
||||
mutator.SelectTarget("env-missing-a-required-variable-assignment"),
|
||||
mutator.SetVariables(),
|
||||
interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
|
@ -82,10 +82,10 @@ func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) {
|
|||
assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable")
|
||||
}
|
||||
|
||||
func TestVariablesEnvironmentsBlockOverrideWithUndefinedVariables(t *testing.T) {
|
||||
func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) {
|
||||
b := load(t, "./variables/env_overrides")
|
||||
err := bundle.Apply(context.Background(), b, bundle.Seq(
|
||||
mutator.SelectEnvironment("env-using-an-undefined-variable"),
|
||||
mutator.SelectTarget("env-using-an-undefined-variable"),
|
||||
mutator.SetVariables(),
|
||||
interpolation.Interpolate(
|
||||
interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix),
|
||||
|
|
|
@ -9,6 +9,7 @@ import (
|
|||
"net/url"
|
||||
"strings"
|
||||
|
||||
"github.com/databricks/cli/libs/databrickscfg"
|
||||
"github.com/databricks/databricks-sdk-go/config"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/ini.v1"
|
||||
|
@ -28,7 +29,7 @@ func canonicalHost(host string) (string, error) {
|
|||
|
||||
var ErrNoMatchingProfiles = errors.New("no matching profiles found")
|
||||
|
||||
func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error) {
|
||||
func resolveSection(cfg *config.Config, iniFile *config.File) (*ini.Section, error) {
|
||||
var candidates []*ini.Section
|
||||
configuredHost, err := canonicalHost(cfg.Host)
|
||||
if err != nil {
|
||||
|
@ -68,7 +69,7 @@ func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error)
|
|||
}
|
||||
|
||||
func loadFromDatabricksCfg(cfg *config.Config) error {
|
||||
iniFile, err := getDatabricksCfg()
|
||||
iniFile, err := databrickscfg.Get()
|
||||
if errors.Is(err, fs.ErrNotExist) {
|
||||
// it's fine not to have ~/.databrickscfg
|
||||
return nil
|
||||
|
|
|
@ -61,7 +61,7 @@ func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command {
|
|||
}
|
||||
|
||||
// If the chosen profile has a hostname and the user hasn't specified a host, infer the host from the profile.
|
||||
_, profiles, err := databrickscfg.LoadProfiles(databrickscfg.DefaultPath, func(p databrickscfg.Profile) bool {
|
||||
_, profiles, err := databrickscfg.LoadProfiles(func(p databrickscfg.Profile) bool {
|
||||
return p.Name == profileName
|
||||
})
|
||||
if err != nil {
|
||||
|
|
|
@ -5,32 +5,16 @@ import (
|
|||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/databricks/cli/libs/cmdio"
|
||||
"github.com/databricks/cli/libs/databrickscfg"
|
||||
"github.com/databricks/databricks-sdk-go"
|
||||
"github.com/databricks/databricks-sdk-go/config"
|
||||
"github.com/spf13/cobra"
|
||||
"gopkg.in/ini.v1"
|
||||
)
|
||||
|
||||
func getDatabricksCfg() (*ini.File, error) {
|
||||
configFile := os.Getenv("DATABRICKS_CONFIG_FILE")
|
||||
if configFile == "" {
|
||||
configFile = "~/.databrickscfg"
|
||||
}
|
||||
if strings.HasPrefix(configFile, "~") {
|
||||
homedir, err := os.UserHomeDir()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot find homedir: %w", err)
|
||||
}
|
||||
configFile = filepath.Join(homedir, configFile[1:])
|
||||
}
|
||||
return ini.Load(configFile)
|
||||
}
|
||||
|
||||
type profileMetadata struct {
|
||||
Name string `json:"name"`
|
||||
Host string `json:"host,omitempty"`
|
||||
|
@ -111,10 +95,12 @@ func newProfilesCommand() *cobra.Command {
|
|||
|
||||
cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
var profiles []*profileMetadata
|
||||
iniFile, err := getDatabricksCfg()
|
||||
iniFile, err := databrickscfg.Get()
|
||||
if os.IsNotExist(err) {
|
||||
// return empty list for non-configured machines
|
||||
iniFile = ini.Empty()
|
||||
iniFile = &config.File{
|
||||
File: &ini.File{},
|
||||
}
|
||||
} else if err != nil {
|
||||
return fmt.Errorf("cannot parse config file: %w", err)
|
||||
}
|
||||
|
|
|
@ -19,5 +19,6 @@ func New() *cobra.Command {
|
|||
cmd.AddCommand(newSyncCommand())
|
||||
cmd.AddCommand(newTestCommand())
|
||||
cmd.AddCommand(newValidateCommand())
|
||||
cmd.AddCommand(newInitCommand())
|
||||
return cmd
|
||||
}
|
||||
|
|
|
@ -17,7 +17,7 @@ func newDeployCommand() *cobra.Command {
|
|||
var forceLock bool
|
||||
var computeID string
|
||||
cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation.")
|
||||
cmd.Flags().BoolVar(&forceLock, "force-deploy", false, "Force acquisition of deployment lock.")
|
||||
cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.")
|
||||
cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.")
|
||||
|
||||
cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
|
|
|
@ -0,0 +1,78 @@
|
|||
package bundle
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/databricks/cli/libs/git"
|
||||
"github.com/databricks/cli/libs/template"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var gitUrlPrefixes = []string{
|
||||
"https://",
|
||||
"git@",
|
||||
}
|
||||
|
||||
func isRepoUrl(url string) bool {
|
||||
result := false
|
||||
for _, prefix := range gitUrlPrefixes {
|
||||
if strings.HasPrefix(url, prefix) {
|
||||
result = true
|
||||
break
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Computes the repo name from the repo URL. Treats the last non empty word
|
||||
// when splitting at '/' as the repo name. For example: for url git@github.com:databricks/cli.git
|
||||
// the name would be "cli.git"
|
||||
func repoName(url string) string {
|
||||
parts := strings.Split(strings.TrimRight(url, "/"), "/")
|
||||
return parts[len(parts)-1]
|
||||
}
|
||||
|
||||
func newInitCommand() *cobra.Command {
|
||||
cmd := &cobra.Command{
|
||||
Use: "init TEMPLATE_PATH",
|
||||
Short: "Initialize Template",
|
||||
Args: cobra.ExactArgs(1),
|
||||
}
|
||||
|
||||
var configFile string
|
||||
var outputDir string
|
||||
cmd.Flags().StringVar(&configFile, "config-file", "", "File containing input parameters for template initialization.")
|
||||
cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to.")
|
||||
|
||||
cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
templatePath := args[0]
|
||||
ctx := cmd.Context()
|
||||
|
||||
if !isRepoUrl(templatePath) {
|
||||
// skip downloading the repo because input arg is not a URL. We assume
|
||||
// it's a path on the local file system in that case
|
||||
return template.Materialize(ctx, configFile, templatePath, outputDir)
|
||||
}
|
||||
|
||||
// Download the template in a temporary directory
|
||||
tmpDir := os.TempDir()
|
||||
templateURL := templatePath
|
||||
templateDir := filepath.Join(tmpDir, repoName(templateURL))
|
||||
err := os.MkdirAll(templateDir, 0755)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// TODO: Add automated test that the downloaded git repo is cleaned up.
|
||||
err = git.Clone(ctx, templateURL, "", templateDir)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer os.RemoveAll(templateDir)
|
||||
|
||||
return template.Materialize(ctx, configFile, templateDir, outputDir)
|
||||
}
|
||||
|
||||
return cmd
|
||||
}
|
|
@ -0,0 +1,27 @@
|
|||
package bundle
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestBundleInitIsRepoUrl(t *testing.T) {
|
||||
assert.True(t, isRepoUrl("git@github.com:databricks/cli.git"))
|
||||
assert.True(t, isRepoUrl("https://github.com/databricks/cli.git"))
|
||||
|
||||
assert.False(t, isRepoUrl("./local"))
|
||||
assert.False(t, isRepoUrl("foo"))
|
||||
}
|
||||
|
||||
func TestBundleInitRepoName(t *testing.T) {
|
||||
// Test valid URLs
|
||||
assert.Equal(t, "cli.git", repoName("git@github.com:databricks/cli.git"))
|
||||
assert.Equal(t, "cli", repoName("https://github.com/databricks/cli/"))
|
||||
|
||||
// test invalid URLs. In these cases the error would be floated when the
|
||||
// git clone operation fails.
|
||||
assert.Equal(t, "git@github.com:databricks", repoName("git@github.com:databricks"))
|
||||
assert.Equal(t, "invalid-url", repoName("invalid-url"))
|
||||
assert.Equal(t, "www.github.com", repoName("https://www.github.com"))
|
||||
}
|
|
@ -23,9 +23,16 @@ func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle)
|
|||
return nil, fmt.Errorf("cannot get bundle cache directory: %w", err)
|
||||
}
|
||||
|
||||
includes, err := b.GetSyncIncludePatterns()
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot get list of sync includes: %w", err)
|
||||
}
|
||||
|
||||
opts := sync.SyncOptions{
|
||||
LocalPath: b.Config.Path,
|
||||
RemotePath: b.Config.Workspace.FilesPath,
|
||||
Include: includes,
|
||||
Exclude: b.Config.Sync.Exclude,
|
||||
Full: f.full,
|
||||
PollInterval: f.interval,
|
||||
|
||||
|
|
|
@ -7,7 +7,7 @@ import (
|
|||
)
|
||||
|
||||
func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error {
|
||||
// Load bundle config and apply environment
|
||||
// Load bundle config and apply target
|
||||
err := root.MustConfigureBundle(cmd, args)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
@ -131,7 +131,7 @@ func newConfigureCommand() *cobra.Command {
|
|||
|
||||
// Include token flag for compatibility with the legacy CLI.
|
||||
// It doesn't actually do anything because we always use PATs.
|
||||
cmd.Flags().BoolP("token", "t", true, "Configure using Databricks Personal Access Token")
|
||||
cmd.Flags().Bool("token", true, "Configure using Databricks Personal Access Token")
|
||||
cmd.Flags().MarkHidden("token")
|
||||
|
||||
cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
|
|
|
@ -40,10 +40,7 @@ func MustAccountClient(cmd *cobra.Command, args []string) error {
|
|||
// 1. only admins will have account configured
|
||||
// 2. 99% of admins will have access to just one account
|
||||
// hence, we don't need to create a special "DEFAULT_ACCOUNT" profile yet
|
||||
_, profiles, err := databrickscfg.LoadProfiles(
|
||||
databrickscfg.DefaultPath,
|
||||
databrickscfg.MatchAccountProfiles,
|
||||
)
|
||||
_, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@ -124,8 +121,11 @@ func transformLoadError(path string, err error) error {
|
|||
}
|
||||
|
||||
func askForWorkspaceProfile() (string, error) {
|
||||
path := databrickscfg.DefaultPath
|
||||
file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchWorkspaceProfiles)
|
||||
path, err := databrickscfg.GetPath()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("cannot determine Databricks config file path: %w", err)
|
||||
}
|
||||
file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchWorkspaceProfiles)
|
||||
if err != nil {
|
||||
return "", transformLoadError(path, err)
|
||||
}
|
||||
|
@ -156,8 +156,11 @@ func askForWorkspaceProfile() (string, error) {
|
|||
}
|
||||
|
||||
func askForAccountProfile() (string, error) {
|
||||
path := databrickscfg.DefaultPath
|
||||
file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchAccountProfiles)
|
||||
path, err := databrickscfg.GetPath()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("cannot determine Databricks config file path: %w", err)
|
||||
}
|
||||
file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles)
|
||||
if err != nil {
|
||||
return "", transformLoadError(path, err)
|
||||
}
|
||||
|
|
Some files were not shown because too many files have changed in this diff.