mirror of https://github.com/databricks/cli.git
merge
This commit is contained in:
commit
30d683a7bf
|
@ -7,12 +7,16 @@ on:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
cleanup:
|
cleanup:
|
||||||
|
name: Stale issue job
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
issues: write
|
issues: write
|
||||||
contents: read
|
contents: read
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
runs-on: ubuntu-latest
|
|
||||||
name: Stale issue job
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/stale@v9
|
- uses: actions/stale@v9
|
||||||
with:
|
with:
|
||||||
|
|
|
@ -13,7 +13,10 @@ on:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
comment-on-pr:
|
comment-on-pr:
|
||||||
runs-on: ubuntu-latest
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,9 @@ jobs:
|
||||||
# * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
|
# * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
|
||||||
#
|
#
|
||||||
trigger:
|
trigger:
|
||||||
runs-on: ubuntu-latest
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Auto-approve squashed commit
|
- name: Auto-approve squashed commit
|
||||||
|
|
|
@ -11,7 +11,10 @@ jobs:
|
||||||
# This workflow triggers the integration test workflow in a different repository.
|
# This workflow triggers the integration test workflow in a different repository.
|
||||||
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
|
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
|
||||||
trigger:
|
trigger:
|
||||||
runs-on: ubuntu-latest
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
environment: "test-trigger-is"
|
environment: "test-trigger-is"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
|
|
@ -10,7 +10,10 @@ jobs:
|
||||||
# This workflow triggers the integration test workflow in a different repository.
|
# This workflow triggers the integration test workflow in a different repository.
|
||||||
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
|
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
|
||||||
trigger:
|
trigger:
|
||||||
runs-on: ubuntu-latest
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
environment: "test-trigger-is"
|
environment: "test-trigger-is"
|
||||||
|
|
||||||
# Only run this job for PRs from branches on the main repository and not from forks.
|
# Only run this job for PRs from branches on the main repository and not from forks.
|
||||||
|
|
|
@ -13,12 +13,26 @@ on:
|
||||||
# seed the build cache.
|
# seed the build cache.
|
||||||
branches:
|
branches:
|
||||||
- main
|
- main
|
||||||
|
schedule:
|
||||||
|
- cron: '0 0,12 * * *' # Runs at 00:00 and 12:00 UTC daily
|
||||||
|
|
||||||
env:
|
env:
|
||||||
GOTESTSUM_FORMAT: github-actions
|
GOTESTSUM_FORMAT: github-actions
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
|
cleanups:
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
steps:
|
||||||
|
- name: Clean up cache if running on schedule
|
||||||
|
if: ${{ github.event_name == 'schedule' }}
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: gh cache delete --all --repo databricks/cli || true
|
||||||
|
|
||||||
tests:
|
tests:
|
||||||
|
needs: cleanups
|
||||||
runs-on: ${{ matrix.os }}
|
runs-on: ${{ matrix.os }}
|
||||||
|
|
||||||
strategy:
|
strategy:
|
||||||
|
@ -61,6 +75,7 @@ jobs:
|
||||||
run: make test
|
run: make test
|
||||||
|
|
||||||
golangci:
|
golangci:
|
||||||
|
needs: cleanups
|
||||||
name: lint
|
name: lint
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
|
@ -68,6 +83,10 @@ jobs:
|
||||||
- uses: actions/setup-go@v5
|
- uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: 1.23.4
|
go-version: 1.23.4
|
||||||
|
# Use different schema from regular job, to avoid overwriting the same key
|
||||||
|
cache-dependency-path: |
|
||||||
|
go.sum
|
||||||
|
.golangci.yaml
|
||||||
- name: Run go mod tidy
|
- name: Run go mod tidy
|
||||||
run: |
|
run: |
|
||||||
go mod tidy
|
go mod tidy
|
||||||
|
@ -82,6 +101,7 @@ jobs:
|
||||||
args: --timeout=15m
|
args: --timeout=15m
|
||||||
|
|
||||||
validate-bundle-schema:
|
validate-bundle-schema:
|
||||||
|
needs: cleanups
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
|
@ -92,6 +112,10 @@ jobs:
|
||||||
uses: actions/setup-go@v5
|
uses: actions/setup-go@v5
|
||||||
with:
|
with:
|
||||||
go-version: 1.23.4
|
go-version: 1.23.4
|
||||||
|
# Use different schema from regular job, to avoid overwriting the same key
|
||||||
|
cache-dependency-path: |
|
||||||
|
go.sum
|
||||||
|
bundle/internal/schema/*.*
|
||||||
|
|
||||||
- name: Verify that the schema is up to date
|
- name: Verify that the schema is up to date
|
||||||
run: |
|
run: |
|
||||||
|
|
|
@ -20,7 +20,10 @@ on:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
goreleaser:
|
goreleaser:
|
||||||
runs-on: ubuntu-latest
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository and submodules
|
- name: Checkout repository and submodules
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
|
|
@ -9,9 +9,13 @@ on:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
goreleaser:
|
goreleaser:
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
outputs:
|
outputs:
|
||||||
artifacts: ${{ steps.releaser.outputs.artifacts }}
|
artifacts: ${{ steps.releaser.outputs.artifacts }}
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout repository and submodules
|
- name: Checkout repository and submodules
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
@ -54,8 +58,12 @@ jobs:
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
create-setup-cli-release-pr:
|
create-setup-cli-release-pr:
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
needs: goreleaser
|
needs: goreleaser
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set VERSION variable from tag
|
- name: Set VERSION variable from tag
|
||||||
run: |
|
run: |
|
||||||
|
@ -78,8 +86,12 @@ jobs:
|
||||||
});
|
});
|
||||||
|
|
||||||
create-homebrew-tap-release-pr:
|
create-homebrew-tap-release-pr:
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
needs: goreleaser
|
needs: goreleaser
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set VERSION variable from tag
|
- name: Set VERSION variable from tag
|
||||||
run: |
|
run: |
|
||||||
|
@ -115,8 +127,12 @@ jobs:
|
||||||
});
|
});
|
||||||
|
|
||||||
create-vscode-extension-update-pr:
|
create-vscode-extension-update-pr:
|
||||||
|
runs-on:
|
||||||
|
group: databricks-deco-testing-runner-group
|
||||||
|
labels: ubuntu-latest-deco
|
||||||
|
|
||||||
needs: goreleaser
|
needs: goreleaser
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
steps:
|
||||||
- name: Set VERSION variable from tag
|
- name: Set VERSION variable from tag
|
||||||
run: |
|
run: |
|
||||||
|
|
|
@ -14,6 +14,7 @@ linters:
|
||||||
- testifylint
|
- testifylint
|
||||||
- intrange
|
- intrange
|
||||||
- mirror
|
- mirror
|
||||||
|
- perfsprint
|
||||||
linters-settings:
|
linters-settings:
|
||||||
govet:
|
govet:
|
||||||
enable-all: true
|
enable-all: true
|
||||||
|
|
|
@ -1,5 +1,14 @@
|
||||||
# Version changelog
|
# Version changelog
|
||||||
|
|
||||||
|
## [Release] Release v0.238.0
|
||||||
|
|
||||||
|
Bundles:
|
||||||
|
* Fix finding Python within virtualenv on Windows ([#2034](https://github.com/databricks/cli/pull/2034)).
|
||||||
|
* Include missing field descriptions in JSON schema ([#2045](https://github.com/databricks/cli/pull/2045)).
|
||||||
|
* Add validation for volume referenced from `artifact_path` ([#2050](https://github.com/databricks/cli/pull/2050)).
|
||||||
|
* Handle `${workspace.file_path}` references in source-linked deployments ([#2046](https://github.com/databricks/cli/pull/2046)).
|
||||||
|
* Set the write bit for files written during template initialization ([#2068](https://github.com/databricks/cli/pull/2068)).
|
||||||
|
|
||||||
## [Release] Release v0.237.0
|
## [Release] Release v0.237.0
|
||||||
|
|
||||||
Bundles:
|
Bundles:
|
||||||
|
|
2
Makefile
2
Makefile
|
@ -1,6 +1,6 @@
|
||||||
default: build
|
default: build
|
||||||
|
|
||||||
PACKAGES=./libs/... ./internal/... ./cmd/... ./bundle/... .
|
PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .
|
||||||
|
|
||||||
GOTESTSUM_FORMAT ?= pkgname-and-test-fails
|
GOTESTSUM_FORMAT ?= pkgname-and-test-fails
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,19 @@
|
||||||
|
Acceptance tests are blackbox tests that are run against compiled binary.
|
||||||
|
|
||||||
|
Currently these tests are run against "fake" HTTP server pretending to be Databricks API. However, they will be extended to run against real environment as regular integration tests.
|
||||||
|
|
||||||
|
To author a test,
|
||||||
|
- Add a new directory under `acceptance`. Any level of nesting is supported.
|
||||||
|
- Add `databricks.yml` there.
|
||||||
|
- Add `script` with commands to run, e.g. `$CLI bundle validate`. The test case is recognized by presence of `script`.
|
||||||
|
|
||||||
|
The test runner will run script and capture output and compare it with `output.txt` file in the same directory.
|
||||||
|
|
||||||
|
In order to write `output.txt` for the first time or overwrite it with the current output pass -update flag to go test.
|
||||||
|
|
||||||
|
The scripts are run with `bash -e` so any errors will be propagated. They are captured in `output.txt` by appending `Exit code: N` line at the end.
|
||||||
|
|
||||||
|
For more complex tests one can also use:
|
||||||
|
- `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script.
|
||||||
|
- `trace` helper: prints the arguments before executing the command.
|
||||||
|
- custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
|
|
@ -0,0 +1,302 @@
|
||||||
|
package acceptance_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"path/filepath"
|
||||||
|
"runtime"
|
||||||
|
"slices"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/databricks/cli/internal/testutil"
|
||||||
|
"github.com/databricks/cli/libs/env"
|
||||||
|
"github.com/databricks/cli/libs/testdiff"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
var KeepTmp = os.Getenv("KEEP_TMP") != ""
|
||||||
|
|
||||||
|
const (
|
||||||
|
EntryPointScript = "script"
|
||||||
|
CleanupScript = "script.cleanup"
|
||||||
|
PrepareScript = "script.prepare"
|
||||||
|
)
|
||||||
|
|
||||||
|
var Scripts = map[string]bool{
|
||||||
|
EntryPointScript: true,
|
||||||
|
CleanupScript: true,
|
||||||
|
PrepareScript: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestAccept(t *testing.T) {
|
||||||
|
execPath := BuildCLI(t)
|
||||||
|
// $CLI is what test scripts are using
|
||||||
|
t.Setenv("CLI", execPath)
|
||||||
|
|
||||||
|
server := StartServer(t)
|
||||||
|
AddHandlers(server)
|
||||||
|
// Redirect API access to local server:
|
||||||
|
t.Setenv("DATABRICKS_HOST", fmt.Sprintf("http://127.0.0.1:%d", server.Port))
|
||||||
|
t.Setenv("DATABRICKS_TOKEN", "dapi1234")
|
||||||
|
|
||||||
|
homeDir := t.TempDir()
|
||||||
|
// Do not read user's ~/.databrickscfg
|
||||||
|
t.Setenv(env.HomeEnvVar(), homeDir)
|
||||||
|
|
||||||
|
testDirs := getTests(t)
|
||||||
|
require.NotEmpty(t, testDirs)
|
||||||
|
for _, dir := range testDirs {
|
||||||
|
t.Run(dir, func(t *testing.T) {
|
||||||
|
t.Parallel()
|
||||||
|
runTest(t, dir)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func getTests(t *testing.T) []string {
|
||||||
|
testDirs := make([]string, 0, 128)
|
||||||
|
|
||||||
|
err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
name := filepath.Base(path)
|
||||||
|
if name == EntryPointScript {
|
||||||
|
// Presence of 'script' marks a test case in this directory
|
||||||
|
testDirs = append(testDirs, filepath.Dir(path))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
sort.Strings(testDirs)
|
||||||
|
return testDirs
|
||||||
|
}
|
||||||
|
|
||||||
|
func runTest(t *testing.T, dir string) {
|
||||||
|
var tmpDir string
|
||||||
|
var err error
|
||||||
|
if KeepTmp {
|
||||||
|
tempDirBase := filepath.Join(os.TempDir(), "acceptance")
|
||||||
|
_ = os.Mkdir(tempDirBase, 0o755)
|
||||||
|
tmpDir, err = os.MkdirTemp(tempDirBase, "")
|
||||||
|
require.NoError(t, err)
|
||||||
|
t.Logf("Created directory: %s", tmpDir)
|
||||||
|
} else {
|
||||||
|
tmpDir = t.TempDir()
|
||||||
|
}
|
||||||
|
|
||||||
|
scriptContents := readMergedScriptContents(t, dir)
|
||||||
|
testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
|
||||||
|
|
||||||
|
inputs := make(map[string]bool, 2)
|
||||||
|
outputs := make(map[string]bool, 2)
|
||||||
|
err = CopyDir(dir, tmpDir, inputs, outputs)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
|
||||||
|
cmd := exec.Command(args[0], args[1:]...)
|
||||||
|
cmd.Dir = tmpDir
|
||||||
|
outB, err := cmd.CombinedOutput()
|
||||||
|
|
||||||
|
out := formatOutput(string(outB), err)
|
||||||
|
out = strings.ReplaceAll(out, os.Getenv("CLI"), "$CLI")
|
||||||
|
doComparison(t, filepath.Join(dir, "output.txt"), "script output", out)
|
||||||
|
|
||||||
|
for key := range outputs {
|
||||||
|
if key == "output.txt" {
|
||||||
|
// handled above
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pathNew := filepath.Join(tmpDir, key)
|
||||||
|
newValBytes, err := os.ReadFile(pathNew)
|
||||||
|
if err != nil {
|
||||||
|
if errors.Is(err, os.ErrNotExist) {
|
||||||
|
t.Errorf("%s: expected to find this file but could not (%s)", key, tmpDir)
|
||||||
|
} else {
|
||||||
|
t.Errorf("%s: could not read: %s", key, err)
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pathExpected := filepath.Join(dir, key)
|
||||||
|
doComparison(t, pathExpected, pathNew, string(newValBytes))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Make sure there are not unaccounted for new files
|
||||||
|
files, err := os.ReadDir(tmpDir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
for _, f := range files {
|
||||||
|
name := f.Name()
|
||||||
|
if _, ok := inputs[name]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if _, ok := outputs[name]; ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
t.Errorf("Unexpected output: %s", f)
|
||||||
|
if strings.HasPrefix(name, "out") {
|
||||||
|
// We have a new file starting with "out"
|
||||||
|
// Show the contents & support overwrite mode for it:
|
||||||
|
pathNew := filepath.Join(tmpDir, name)
|
||||||
|
newVal := testutil.ReadFile(t, pathNew)
|
||||||
|
doComparison(t, filepath.Join(dir, name), filepath.Join(tmpDir, name), newVal)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {
|
||||||
|
valueNew = testdiff.NormalizeNewlines(valueNew)
|
||||||
|
valueExpected := string(readIfExists(t, pathExpected))
|
||||||
|
valueExpected = testdiff.NormalizeNewlines(valueExpected)
|
||||||
|
testdiff.AssertEqualTexts(t, pathExpected, pathNew, valueExpected, valueNew)
|
||||||
|
if testdiff.OverwriteMode {
|
||||||
|
if valueNew != "" {
|
||||||
|
t.Logf("Overwriting: %s", pathExpected)
|
||||||
|
testutil.WriteFile(t, pathExpected, valueNew)
|
||||||
|
} else {
|
||||||
|
t.Logf("Removing: %s", pathExpected)
|
||||||
|
_ = os.Remove(pathExpected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
|
||||||
|
// Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
|
||||||
|
func readMergedScriptContents(t *testing.T, dir string) string {
|
||||||
|
scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))
|
||||||
|
prepares := []string{}
|
||||||
|
cleanups := []string{}
|
||||||
|
|
||||||
|
for {
|
||||||
|
x := readIfExists(t, filepath.Join(dir, CleanupScript))
|
||||||
|
if len(x) > 0 {
|
||||||
|
cleanups = append(cleanups, string(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
x = readIfExists(t, filepath.Join(dir, PrepareScript))
|
||||||
|
if len(x) > 0 {
|
||||||
|
prepares = append(prepares, string(x))
|
||||||
|
}
|
||||||
|
|
||||||
|
if dir == "" || dir == "." {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
dir = filepath.Dir(dir)
|
||||||
|
require.True(t, filepath.IsLocal(dir))
|
||||||
|
}
|
||||||
|
|
||||||
|
slices.Reverse(prepares)
|
||||||
|
prepares = append(prepares, scriptContents)
|
||||||
|
prepares = append(prepares, cleanups...)
|
||||||
|
return strings.Join(prepares, "\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
func BuildCLI(t *testing.T) string {
|
||||||
|
cwd, err := os.Getwd()
|
||||||
|
require.NoError(t, err)
|
||||||
|
execPath := filepath.Join(cwd, "build", "databricks")
|
||||||
|
if runtime.GOOS == "windows" {
|
||||||
|
execPath += ".exe"
|
||||||
|
}
|
||||||
|
|
||||||
|
start := time.Now()
|
||||||
|
args := []string{"go", "build", "-mod", "vendor", "-o", execPath}
|
||||||
|
cmd := exec.Command(args[0], args[1:]...)
|
||||||
|
cmd.Dir = ".."
|
||||||
|
out, err := cmd.CombinedOutput()
|
||||||
|
elapsed := time.Since(start)
|
||||||
|
t.Logf("%s took %s", args, elapsed)
|
||||||
|
require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
|
||||||
|
if len(out) > 0 {
|
||||||
|
t.Logf("go build output: %s: %s", args, out)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Quick check + warm up cache:
|
||||||
|
cmd = exec.Command(execPath, "--version")
|
||||||
|
out, err = cmd.CombinedOutput()
|
||||||
|
require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
|
||||||
|
return execPath
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyFile(src, dst string) error {
|
||||||
|
in, err := os.Open(src)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer in.Close()
|
||||||
|
|
||||||
|
out, err := os.Create(dst)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
defer out.Close()
|
||||||
|
|
||||||
|
_, err = io.Copy(out, in)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func formatOutput(out string, err error) string {
|
||||||
|
if err == nil {
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
if exiterr, ok := err.(*exec.ExitError); ok {
|
||||||
|
exitCode := exiterr.ExitCode()
|
||||||
|
out += fmt.Sprintf("\nExit code: %d\n", exitCode)
|
||||||
|
} else {
|
||||||
|
out += fmt.Sprintf("\nError: %s\n", err)
|
||||||
|
}
|
||||||
|
return out
|
||||||
|
}
|
||||||
|
|
||||||
|
func readIfExists(t *testing.T, path string) []byte {
|
||||||
|
data, err := os.ReadFile(path)
|
||||||
|
if err == nil {
|
||||||
|
return data
|
||||||
|
}
|
||||||
|
|
||||||
|
if !errors.Is(err, os.ErrNotExist) {
|
||||||
|
t.Fatalf("%s: %s", path, err)
|
||||||
|
}
|
||||||
|
return []byte{}
|
||||||
|
}
|
||||||
|
|
||||||
|
func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
|
||||||
|
return filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
name := info.Name()
|
||||||
|
|
||||||
|
relPath, err := filepath.Rel(src, path)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if strings.HasPrefix(name, "out") {
|
||||||
|
outputs[relPath] = true
|
||||||
|
return nil
|
||||||
|
} else {
|
||||||
|
inputs[relPath] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := Scripts[name]; ok {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
destPath := filepath.Join(dst, relPath)
|
||||||
|
|
||||||
|
if info.IsDir() {
|
||||||
|
return os.MkdirAll(destPath, info.Mode())
|
||||||
|
}
|
||||||
|
|
||||||
|
return copyFile(path, destPath)
|
||||||
|
})
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
databricks
|
|
@ -1,9 +1,6 @@
|
||||||
bundle:
|
bundle:
|
||||||
name: clusters
|
name: clusters
|
||||||
|
|
||||||
workspace:
|
|
||||||
host: https://acme.cloud.databricks.com/
|
|
||||||
|
|
||||||
resources:
|
resources:
|
||||||
clusters:
|
clusters:
|
||||||
foo:
|
foo:
|
|
@ -0,0 +1,33 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t default
|
||||||
|
{
|
||||||
|
"autoscale": {
|
||||||
|
"max_workers": 7,
|
||||||
|
"min_workers": 2
|
||||||
|
},
|
||||||
|
"cluster_name": "foo",
|
||||||
|
"custom_tags": {},
|
||||||
|
"node_type_id": "i3.xlarge",
|
||||||
|
"num_workers": 2,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.executor.memory": "2g"
|
||||||
|
},
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t development
|
||||||
|
{
|
||||||
|
"autoscale": {
|
||||||
|
"max_workers": 3,
|
||||||
|
"min_workers": 1
|
||||||
|
},
|
||||||
|
"cluster_name": "foo-override",
|
||||||
|
"custom_tags": {},
|
||||||
|
"node_type_id": "m5.xlarge",
|
||||||
|
"num_workers": 3,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.executor.memory": "4g",
|
||||||
|
"spark.executor.memory2": "4g"
|
||||||
|
},
|
||||||
|
"spark_version": "15.2.x-scala2.12"
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace $CLI bundle validate -o json -t default | jq .resources.clusters.foo
|
||||||
|
trace $CLI bundle validate -o json -t development | jq .resources.clusters.foo
|
|
@ -1,9 +1,6 @@
|
||||||
bundle:
|
bundle:
|
||||||
name: override_job_cluster
|
name: override_job_cluster
|
||||||
|
|
||||||
workspace:
|
|
||||||
host: https://acme.cloud.databricks.com/
|
|
||||||
|
|
||||||
resources:
|
resources:
|
||||||
jobs:
|
jobs:
|
||||||
foo:
|
foo:
|
|
@ -0,0 +1,56 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t development
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.xlarge",
|
||||||
|
"num_workers": 1,
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "job",
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t staging
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.2xlarge",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "job",
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace $CLI bundle validate -o json -t development | jq '.resources.jobs'
|
||||||
|
trace $CLI bundle validate -o json -t staging | jq '.resources.jobs'
|
|
@ -0,0 +1,37 @@
|
||||||
|
bundle:
|
||||||
|
name: override_job_cluster
|
||||||
|
|
||||||
|
variables:
|
||||||
|
mykey:
|
||||||
|
default: key
|
||||||
|
|
||||||
|
resources:
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
name: job
|
||||||
|
job_clusters:
|
||||||
|
- job_cluster_key: key
|
||||||
|
new_cluster:
|
||||||
|
spark_version: 13.3.x-scala2.12
|
||||||
|
|
||||||
|
targets:
|
||||||
|
development:
|
||||||
|
resources:
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
job_clusters:
|
||||||
|
# This does not work because merging is done before resolution
|
||||||
|
- job_cluster_key: "${var.mykey}"
|
||||||
|
new_cluster:
|
||||||
|
node_type_id: i3.xlarge
|
||||||
|
num_workers: 1
|
||||||
|
|
||||||
|
staging:
|
||||||
|
resources:
|
||||||
|
jobs:
|
||||||
|
foo:
|
||||||
|
job_clusters:
|
||||||
|
- job_cluster_key: "${var.mykey}"
|
||||||
|
new_cluster:
|
||||||
|
node_type_id: i3.2xlarge
|
||||||
|
num_workers: 4
|
|
@ -0,0 +1,84 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t development
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.xlarge",
|
||||||
|
"num_workers": 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "job",
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t development
|
||||||
|
Name: override_job_cluster
|
||||||
|
Target: development
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development
|
||||||
|
|
||||||
|
Validation OK!
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t staging
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.2xlarge",
|
||||||
|
"num_workers": 4
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"name": "job",
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t staging
|
||||||
|
Name: override_job_cluster
|
||||||
|
Target: staging
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging
|
||||||
|
|
||||||
|
Validation OK!
|
|
@ -0,0 +1,4 @@
|
||||||
|
trace $CLI bundle validate -o json -t development | jq '.resources.jobs'
|
||||||
|
trace $CLI bundle validate -t development
|
||||||
|
trace $CLI bundle validate -o json -t staging | jq '.resources.jobs'
|
||||||
|
trace $CLI bundle validate -t staging
|
|
@ -1,9 +1,6 @@
|
||||||
bundle:
|
bundle:
|
||||||
name: override_job_tasks
|
name: override_job_tasks
|
||||||
|
|
||||||
workspace:
|
|
||||||
host: https://acme.cloud.databricks.com/
|
|
||||||
|
|
||||||
resources:
|
resources:
|
||||||
jobs:
|
jobs:
|
||||||
foo:
|
foo:
|
|
@ -0,0 +1,68 @@
|
||||||
|
|
||||||
|
>>> errcode $CLI bundle validate -o json -t development
|
||||||
|
Error: file ./test1.py not found
|
||||||
|
|
||||||
|
Exit code: 1
|
||||||
|
{
|
||||||
|
"name": "job",
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {},
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.xlarge",
|
||||||
|
"num_workers": 1,
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
},
|
||||||
|
"spark_python_task": {
|
||||||
|
"python_file": "./test1.py"
|
||||||
|
},
|
||||||
|
"task_key": "key1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"new_cluster": {
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
},
|
||||||
|
"spark_python_task": {
|
||||||
|
"python_file": "./test2.py"
|
||||||
|
},
|
||||||
|
"task_key": "key2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> errcode $CLI bundle validate -o json -t staging
|
||||||
|
Error: file ./test1.py not found
|
||||||
|
|
||||||
|
Exit code: 1
|
||||||
|
{
|
||||||
|
"name": "job",
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {},
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"new_cluster": {
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
},
|
||||||
|
"spark_python_task": {
|
||||||
|
"python_file": "./test1.py"
|
||||||
|
},
|
||||||
|
"task_key": "key1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "i3.2xlarge",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_version": "13.3.x-scala2.12"
|
||||||
|
},
|
||||||
|
"spark_python_task": {
|
||||||
|
"python_file": "./test3.py"
|
||||||
|
},
|
||||||
|
"task_key": "key2"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace errcode $CLI bundle validate -o json -t development | jq .resources.jobs.foo
|
||||||
|
trace errcode $CLI bundle validate -o json -t staging | jq .resources.jobs.foo
|
|
@ -0,0 +1,13 @@
|
||||||
|
bundle:
|
||||||
|
name: merge-string-map
|
||||||
|
|
||||||
|
resources:
|
||||||
|
clusters:
|
||||||
|
my_cluster: "hello"
|
||||||
|
|
||||||
|
targets:
|
||||||
|
dev:
|
||||||
|
resources:
|
||||||
|
clusters:
|
||||||
|
my_cluster:
|
||||||
|
spark_version: "25"
|
|
@ -0,0 +1,23 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t dev
|
||||||
|
{
|
||||||
|
"clusters": {
|
||||||
|
"my_cluster": {
|
||||||
|
"custom_tags": {},
|
||||||
|
"spark_version": "25"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t dev
|
||||||
|
Warning: expected map, found string
|
||||||
|
at resources.clusters.my_cluster
|
||||||
|
in databricks.yml:6:17
|
||||||
|
|
||||||
|
Name: merge-string-map
|
||||||
|
Target: dev
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/merge-string-map/dev
|
||||||
|
|
||||||
|
Found 1 warning
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace $CLI bundle validate -o json -t dev | jq .resources
|
||||||
|
trace $CLI bundle validate -t dev
|
|
@ -1,9 +1,6 @@
|
||||||
bundle:
|
bundle:
|
||||||
name: override_pipeline_cluster
|
name: override_pipeline_cluster
|
||||||
|
|
||||||
workspace:
|
|
||||||
host: https://acme.cloud.databricks.com/
|
|
||||||
|
|
||||||
resources:
|
resources:
|
||||||
pipelines:
|
pipelines:
|
||||||
foo:
|
foo:
|
|
@ -0,0 +1,44 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t development
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"clusters": [
|
||||||
|
{
|
||||||
|
"label": "default",
|
||||||
|
"node_type_id": "i3.xlarge",
|
||||||
|
"num_workers": 1,
|
||||||
|
"spark_conf": {
|
||||||
|
"foo": "bar"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/development/state/metadata.json"
|
||||||
|
},
|
||||||
|
"name": "job",
|
||||||
|
"permissions": []
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t staging
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"clusters": [
|
||||||
|
{
|
||||||
|
"label": "default",
|
||||||
|
"node_type_id": "i3.2xlarge",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"foo": "bar"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/staging/state/metadata.json"
|
||||||
|
},
|
||||||
|
"name": "job",
|
||||||
|
"permissions": []
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace $CLI bundle validate -o json -t development | jq .resources.pipelines
|
||||||
|
trace $CLI bundle validate -o json -t staging | jq .resources.pipelines
|
|
@ -0,0 +1,19 @@
|
||||||
|
bundle:
|
||||||
|
name: complex-transitive
|
||||||
|
|
||||||
|
variables:
|
||||||
|
catalog:
|
||||||
|
default: hive_metastore
|
||||||
|
spark_conf:
|
||||||
|
default:
|
||||||
|
"spark.databricks.sql.initial.catalog.name": ${var.catalog}
|
||||||
|
etl_cluster_config:
|
||||||
|
type: complex
|
||||||
|
default:
|
||||||
|
spark_version: 14.3.x-scala2.12
|
||||||
|
runtime_engine: PHOTON
|
||||||
|
spark_conf: ${var.spark_conf}
|
||||||
|
|
||||||
|
resources:
|
||||||
|
clusters:
|
||||||
|
my_cluster: ${var.etl_cluster_config}
|
|
@ -0,0 +1,3 @@
|
||||||
|
{
|
||||||
|
"spark.databricks.sql.initial.catalog.name": "${var.catalog}"
|
||||||
|
}
|
|
@ -0,0 +1,2 @@
|
||||||
|
# Currently, this incorrectly outputs variable reference instead of resolved value
|
||||||
|
$CLI bundle validate -o json | jq '.resources.clusters.my_cluster.spark_conf'
|
|
@ -11,6 +11,7 @@ resources:
|
||||||
- task_key: test
|
- task_key: test
|
||||||
job_cluster_key: key
|
job_cluster_key: key
|
||||||
libraries: ${variables.libraries.value}
|
libraries: ${variables.libraries.value}
|
||||||
|
# specific fields of complex variable are referenced:
|
||||||
task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"
|
task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
|
@ -35,30 +36,21 @@ variables:
|
||||||
- jar: "/path/to/jar"
|
- jar: "/path/to/jar"
|
||||||
- egg: "/path/to/egg"
|
- egg: "/path/to/egg"
|
||||||
- whl: "/path/to/whl"
|
- whl: "/path/to/whl"
|
||||||
complexvar:
|
|
||||||
type: complex
|
|
||||||
description: "A complex variable"
|
|
||||||
default:
|
|
||||||
key1: "value1"
|
|
||||||
key2: "value2"
|
|
||||||
key3: "value3"
|
|
||||||
|
|
||||||
|
|
||||||
targets:
|
targets:
|
||||||
default:
|
default:
|
||||||
|
default: true
|
||||||
dev:
|
dev:
|
||||||
variables:
|
variables:
|
||||||
node_type: "Standard_DS3_v3"
|
node_type: "Standard_DS3_v3"
|
||||||
cluster:
|
cluster:
|
||||||
|
# complex variables are not merged, so missing variables (policy_id) are not inherited
|
||||||
spark_version: "14.2.x-scala2.11"
|
spark_version: "14.2.x-scala2.11"
|
||||||
node_type_id: ${var.node_type}
|
node_type_id: ${var.node_type}
|
||||||
num_workers: 4
|
num_workers: 4
|
||||||
spark_conf:
|
spark_conf:
|
||||||
spark.speculation: false
|
spark.speculation: false
|
||||||
spark.databricks.delta.retentionDurationCheck.enabled: false
|
spark.databricks.delta.retentionDurationCheck.enabled: false
|
||||||
complexvar:
|
libraries:
|
||||||
type: complex
|
- jar: "/newpath/to/jar"
|
||||||
default:
|
- whl: "/newpath/to/whl"
|
||||||
key1: "1"
|
|
||||||
key2: "2"
|
|
||||||
key3: "3"
|
|
|
@ -0,0 +1,110 @@
|
||||||
|
{
|
||||||
|
"resources": {
|
||||||
|
"jobs": {
|
||||||
|
"my_job": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables/default/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 2,
|
||||||
|
"policy_id": "some-policy-id",
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.random": "true",
|
||||||
|
"spark.speculation": "true"
|
||||||
|
},
|
||||||
|
"spark_version": "13.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {},
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"libraries": [
|
||||||
|
{
|
||||||
|
"jar": "/path/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"egg": "/path/to/egg"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/path/to/whl"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"task_key": "task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"cluster": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 2,
|
||||||
|
"policy_id": "some-policy-id",
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.random": true,
|
||||||
|
"spark.speculation": true
|
||||||
|
},
|
||||||
|
"spark_version": "13.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 2,
|
||||||
|
"policy_id": "some-policy-id",
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.random": true,
|
||||||
|
"spark.speculation": true
|
||||||
|
},
|
||||||
|
"spark_version": "13.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"libraries": {
|
||||||
|
"default": [
|
||||||
|
{
|
||||||
|
"jar": "/path/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"egg": "/path/to/egg"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/path/to/whl"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "A libraries definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"jar": "/path/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"egg": "/path/to/egg"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/path/to/whl"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_type": {
|
||||||
|
"default": "Standard_DS3_v2",
|
||||||
|
"value": "Standard_DS3_v2"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,95 @@
|
||||||
|
{
|
||||||
|
"resources": {
|
||||||
|
"jobs": {
|
||||||
|
"my_job": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables/dev/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v3",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.speculation": "false"
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {},
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key",
|
||||||
|
"libraries": [
|
||||||
|
{
|
||||||
|
"jar": "/newpath/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/newpath/to/whl"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"task_key": "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"cluster": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v3",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v3",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"libraries": {
|
||||||
|
"default": [
|
||||||
|
{
|
||||||
|
"jar": "/newpath/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/newpath/to/whl"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"description": "A libraries definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": [
|
||||||
|
{
|
||||||
|
"jar": "/newpath/to/jar"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"whl": "/newpath/to/whl"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"node_type": {
|
||||||
|
"default": "Standard_DS3_v3",
|
||||||
|
"value": "Standard_DS3_v3"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,14 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json
|
||||||
|
|
||||||
|
>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
|
||||||
|
"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -o json -t dev
|
||||||
|
|
||||||
|
>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
|
||||||
|
"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
|
||||||
|
policy_id and spark_conf.spark_random fields do not exist in dev target:
|
||||||
|
|
||||||
|
>>> jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json
|
||||||
|
null
|
|
@ -0,0 +1,8 @@
|
||||||
|
trace $CLI bundle validate -o json | jq '{resources,variables}' > out.default.json
|
||||||
|
trace jq .resources.jobs.my_job.tasks[0].task_key out.default.json | grep "task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
|
||||||
|
|
||||||
|
trace $CLI bundle validate -o json -t dev | jq '{resources,variables}' > out.dev.json
|
||||||
|
trace jq .resources.jobs.my_job.tasks[0].task_key out.dev.json | grep "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
|
||||||
|
|
||||||
|
echo policy_id and spark_conf.spark_random fields do not exist in dev target:
|
||||||
|
trace jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json | grep null
|
|
@ -0,0 +1,159 @@
|
||||||
|
{
|
||||||
|
"resources": {
|
||||||
|
"jobs": {
|
||||||
|
"my_job": {
|
||||||
|
"deployment": {
|
||||||
|
"kind": "BUNDLE",
|
||||||
|
"metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
|
||||||
|
},
|
||||||
|
"edit_mode": "UI_LOCKED",
|
||||||
|
"format": "MULTI_TASK",
|
||||||
|
"job_clusters": [
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key1",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.speculation": "false"
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key2",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.speculation": "false"
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key3",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.speculation": "false"
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"job_cluster_key": "key4",
|
||||||
|
"new_cluster": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": "false",
|
||||||
|
"spark.speculation": "false"
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"permissions": [],
|
||||||
|
"queue": {
|
||||||
|
"enabled": true
|
||||||
|
},
|
||||||
|
"tags": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"cluster1": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cluster2": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cluster3": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cluster4": {
|
||||||
|
"default": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
},
|
||||||
|
"description": "A cluster definition",
|
||||||
|
"type": "complex",
|
||||||
|
"value": {
|
||||||
|
"node_type_id": "Standard_DS3_v2",
|
||||||
|
"num_workers": 4,
|
||||||
|
"spark_conf": {
|
||||||
|
"spark.databricks.delta.retentionDurationCheck.enabled": false,
|
||||||
|
"spark.speculation": false
|
||||||
|
},
|
||||||
|
"spark_version": "14.2.x-scala2.11"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
$CLI bundle validate -t dev -o json | jq '{resources, variables}'
|
|
@ -0,0 +1,11 @@
|
||||||
|
Error: no value assigned to required variable a. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_a environment variable
|
||||||
|
|
||||||
|
Name: empty${var.a}
|
||||||
|
Target: default
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/empty${var.a}/default
|
||||||
|
|
||||||
|
Found 1 error
|
||||||
|
|
||||||
|
Exit code: 1
|
|
@ -0,0 +1 @@
|
||||||
|
$CLI bundle validate
|
|
@ -0,0 +1,40 @@
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t env-with-single-variable-override -o json
|
||||||
|
"default-a dev-b"
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t env-with-two-variable-overrides -o json
|
||||||
|
"prod-a prod-b"
|
||||||
|
|
||||||
|
>>> BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json
|
||||||
|
"prod-a env-var-b"
|
||||||
|
|
||||||
|
>>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
|
||||||
|
Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
|
||||||
|
|
||||||
|
Name: test bundle
|
||||||
|
Target: env-missing-a-required-variable-assignment
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/test bundle/env-missing-a-required-variable-assignment
|
||||||
|
|
||||||
|
Found 1 error
|
||||||
|
|
||||||
|
Exit code: 1
|
||||||
|
|
||||||
|
>>> errcode $CLI bundle validate -t env-using-an-undefined-variable
|
||||||
|
Error: variable c is not defined but is assigned a value
|
||||||
|
|
||||||
|
Name: test bundle
|
||||||
|
|
||||||
|
Found 1 error
|
||||||
|
|
||||||
|
Exit code: 1
|
||||||
|
|
||||||
|
>>> $CLI bundle validate -t env-overrides-lookup -o json
|
||||||
|
{
|
||||||
|
"a": "default-a",
|
||||||
|
"b": "prod-b",
|
||||||
|
"d": "4321",
|
||||||
|
"e": "1234",
|
||||||
|
"f": "9876"
|
||||||
|
}
|
|
@ -0,0 +1,6 @@
|
||||||
|
trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .workspace.profile
|
||||||
|
trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
|
||||||
|
trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
|
||||||
|
trace errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
|
||||||
|
trace errcode $CLI bundle validate -t env-using-an-undefined-variable
|
||||||
|
trace $CLI bundle validate -t env-overrides-lookup -o json | jq '.variables | map_values(.value)'
|
|
@ -0,0 +1,6 @@
|
||||||
|
bundle:
|
||||||
|
name: TestResolveVariableReferences
|
||||||
|
|
||||||
|
workspace:
|
||||||
|
root_path: "${bundle.name}/bar"
|
||||||
|
file_path: "${workspace.root_path}/baz"
|
|
@ -0,0 +1,11 @@
|
||||||
|
{
|
||||||
|
"artifact_path": "TestResolveVariableReferences/bar/artifacts",
|
||||||
|
"current_user": {
|
||||||
|
"short_name": "tester",
|
||||||
|
"userName": "tester@databricks.com"
|
||||||
|
},
|
||||||
|
"file_path": "TestResolveVariableReferences/bar/baz",
|
||||||
|
"resource_path": "TestResolveVariableReferences/bar/resources",
|
||||||
|
"root_path": "TestResolveVariableReferences/bar",
|
||||||
|
"state_path": "TestResolveVariableReferences/bar/state"
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
$CLI bundle validate -o json | jq .workspace
|
|
@ -0,0 +1,10 @@
|
||||||
|
bundle:
|
||||||
|
name: TestResolveVariableReferencesToEmptyFields
|
||||||
|
git:
|
||||||
|
branch: ""
|
||||||
|
|
||||||
|
resources:
|
||||||
|
jobs:
|
||||||
|
job1:
|
||||||
|
tags:
|
||||||
|
git_branch: "${bundle.git.branch}"
|
|
@ -0,0 +1,3 @@
|
||||||
|
{
|
||||||
|
"git_branch": ""
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
$CLI bundle validate -o json | jq .resources.jobs.job1.tags
|
|
@ -0,0 +1,16 @@
|
||||||
|
bundle:
|
||||||
|
name: TestResolveComplexVariableReferencesToFields
|
||||||
|
|
||||||
|
variables:
|
||||||
|
cluster:
|
||||||
|
type: "complex"
|
||||||
|
default:
|
||||||
|
node_type_id: "Standard_DS3_v2"
|
||||||
|
num_workers: 2
|
||||||
|
|
||||||
|
resources:
|
||||||
|
jobs:
|
||||||
|
job1:
|
||||||
|
job_clusters:
|
||||||
|
- new_cluster:
|
||||||
|
node_type_id: "${var.cluster.node_type_id}"
|
|
@ -0,0 +1,3 @@
|
||||||
|
{
|
||||||
|
"node_type_id": "Standard_DS3_v2"
|
||||||
|
}
|
|
@ -0,0 +1 @@
|
||||||
|
$CLI bundle validate -o json | jq .resources.jobs.job1.job_clusters[0].new_cluster
|
|
@ -0,0 +1,16 @@
|
||||||
|
|
||||||
|
>>> BUNDLE_VAR_b=def $CLI bundle validate -o json
|
||||||
|
"abc def"
|
||||||
|
|
||||||
|
>>> errcode $CLI bundle validate
|
||||||
|
Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
|
||||||
|
|
||||||
|
Name: ${var.a} ${var.b}
|
||||||
|
Target: default
|
||||||
|
Workspace:
|
||||||
|
User: tester@databricks.com
|
||||||
|
Path: /Workspace/Users/tester@databricks.com/.bundle/${var.a} ${var.b}/default
|
||||||
|
|
||||||
|
Found 1 error
|
||||||
|
|
||||||
|
Exit code: 1
|
|
@ -0,0 +1,2 @@
|
||||||
|
trace BUNDLE_VAR_b=def $CLI bundle validate -o json | jq .bundle.name
|
||||||
|
trace errcode $CLI bundle validate
|
|
@@ -0,0 +1,84 @@

>>> $CLI bundle validate -o json -t use-default-variable-values
{
  "pipelines": {
    "my_pipeline": {
      "clusters": [
        {
          "label": "default",
          "num_workers": 42
        }
      ],
      "continuous": true,
      "deployment": {
        "kind": "BUNDLE",
        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/use-default-variable-values/state/metadata.json"
      },
      "name": "a_string",
      "permissions": []
    }
  }
}

>>> $CLI bundle validate -o json -t override-string-variable
{
  "pipelines": {
    "my_pipeline": {
      "clusters": [
        {
          "label": "default",
          "num_workers": 42
        }
      ],
      "continuous": true,
      "deployment": {
        "kind": "BUNDLE",
        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-string-variable/state/metadata.json"
      },
      "name": "overridden_string",
      "permissions": []
    }
  }
}

>>> $CLI bundle validate -o json -t override-int-variable
{
  "pipelines": {
    "my_pipeline": {
      "clusters": [
        {
          "label": "default",
          "num_workers": 43
        }
      ],
      "continuous": true,
      "deployment": {
        "kind": "BUNDLE",
        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-int-variable/state/metadata.json"
      },
      "name": "a_string",
      "permissions": []
    }
  }
}

>>> $CLI bundle validate -o json -t override-both-bool-and-string-variables
{
  "pipelines": {
    "my_pipeline": {
      "clusters": [
        {
          "label": "default",
          "num_workers": 42
        }
      ],
      "continuous": false,
      "deployment": {
        "kind": "BUNDLE",
        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
      },
      "name": "overridden_string",
      "permissions": []
    }
  }
}
@@ -0,0 +1,4 @@
trace $CLI bundle validate -o json -t use-default-variable-values | jq .resources
trace $CLI bundle validate -o json -t override-string-variable | jq .resources
trace $CLI bundle validate -o json -t override-int-variable | jq .resources
trace $CLI bundle validate -o json -t override-both-bool-and-string-variables | jq .resources
@@ -1,3 +1,5 @@
+bundle:
+  name: x
 variables:
   a:
   b:
@@ -0,0 +1,4 @@
{
  "a": "foo",
  "b": "bar"
}
@@ -0,0 +1 @@
BUNDLE_VAR_a=foo BUNDLE_VAR_b=bar $CLI bundle validate -o json | jq '.variables | map_values(.value)'
@@ -0,0 +1,143 @@
Databricks CLI

Usage:
  databricks [command]

Databricks Workspace
  fs    Filesystem related commands
  git-credentials    Registers personal access token for Databricks to do operations on behalf of the user.
  repos    The Repos API allows users to manage their git repos.
  secrets    The Secrets API allows you to manage secrets, secret scopes, and access permissions.
  workspace    The Workspace API allows you to list, import, export, and delete notebooks and folders.

Compute
  cluster-policies    You can use cluster policies to control users' ability to configure clusters based on a set of rules.
  clusters    The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
  global-init-scripts    The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.
  instance-pools    Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.
  instance-profiles    The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.
  libraries    The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.
  policy-compliance-for-clusters    The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.
  policy-families    View available policy families.

Workflows
  jobs    The Jobs API allows you to create, edit, and delete jobs.
  policy-compliance-for-jobs    The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.

Delta Live Tables
  pipelines    The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.

Machine Learning
  experiments    Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.
  model-registry    Note: This API reference documents APIs for the Workspace Model Registry.

Real-time Serving
  serving-endpoints    The Serving Endpoints API allows you to create, update, and delete model serving endpoints.

Identity and Access Management
  current-user    This API allows retrieving information about currently authenticated user or service principal.
  groups    Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.
  permissions    Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.
  service-principals    Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
  users    User identities recognized by Databricks and represented by email addresses.

Databricks SQL
  alerts    The alerts API can be used to perform CRUD operations on alerts.
  alerts-legacy    The alerts API can be used to perform CRUD operations on alerts.
  dashboards    In general, there is little need to modify dashboards using the API.
  data-sources    This API is provided to assist you in making new query objects.
  queries    The queries API can be used to perform CRUD operations on queries.
  queries-legacy    These endpoints are used for CRUD operations on query definitions.
  query-history    A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.
  warehouses    A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.

Unity Catalog
  artifact-allowlists    In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the allowlist in UC so that users can leverage these artifacts on compute configured with shared access mode.
  catalogs    A catalog is the first layer of Unity Catalog’s three-level namespace.
  connections    Connections allow for creating a connection to an external data source.
  credentials    A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant.
  external-locations    An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.
  functions    Functions implement User-Defined Functions (UDFs) in Unity Catalog.
  grants    In Unity Catalog, data is secure by default.
  metastores    A metastore is the top-level container of objects in Unity Catalog.
  model-versions    Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.
  online-tables    Online tables provide lower latency and higher QPS access to data from Delta tables.
  quality-monitors    A monitor computes and monitors data or model quality metrics for a table over time.
  registered-models    Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.
  resource-quotas    Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.
  schemas    A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.
  storage-credentials    A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.
  system-schemas    A system schema is a schema that lives within the system catalog.
  table-constraints    Primary key and foreign key constraints encode relationships between fields in tables.
  tables    A table resides in the third layer of Unity Catalog’s three-level namespace.
  temporary-table-credentials    Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks.
  volumes    Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.
  workspace-bindings    A securable in Databricks can be configured as __OPEN__ or __ISOLATED__.

Delta Sharing
  providers    A data provider is an object representing the organization in the real world who shares the data.
  recipient-activation    The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of TOKEN.
  recipients    A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares.
  shares    A share is a container instantiated with :method:shares/create.

Settings
  ip-access-lists    IP Access List enables admins to configure IP access lists.
  notification-destinations    The notification destinations API lets you programmatically manage a workspace's notification destinations.
  settings    Workspace Settings API allows users to manage settings at the workspace level.
  token-management    Enables administrators to get all tokens and delete tokens for other users.
  tokens    The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.
  workspace-conf    This API allows updating known workspace settings for advanced users.

Developer Tools
  bundle    Databricks Asset Bundles let you express data/AI/analytics projects as code.
  sync    Synchronize a local directory to a workspace directory

Vector Search
  vector-search-endpoints    **Endpoint**: Represents the compute resources to host vector search indexes.
  vector-search-indexes    **Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries.

Dashboards
  lakeview    These APIs provide specific management operations for Lakeview dashboards.

Marketplace
  consumer-fulfillments    Fulfillments are entities that allow consumers to preview installations.
  consumer-installations    Installations are entities that allow consumers to interact with Databricks Marketplace listings.
  consumer-listings    Listings are the core entities in the Marketplace.
  consumer-personalization-requests    Personalization Requests allow customers to interact with the individualized Marketplace listing flow.
  consumer-providers    Providers are the entities that publish listings to the Marketplace.
  provider-exchange-filters    Marketplace exchanges filters curate which groups can access an exchange.
  provider-exchanges    Marketplace exchanges allow providers to share their listings with a curated set of customers.
  provider-files    Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons.
  provider-listings    Listings are the core entities in the Marketplace.
  provider-personalization-requests    Personalization requests are an alternate to instantly available listings.
  provider-provider-analytics-dashboards    Manage templated analytics solution for providers.
  provider-providers    Providers are entities that manage assets in Marketplace.

Apps
  apps    Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.
  apps    Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.

Clean Rooms
  clean-room-assets    Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.
  clean-room-task-runs    Clean room task runs are the executions of notebooks in a clean room.
  clean-rooms    A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.

Additional Commands:
  account    Databricks Account Commands
  api    Perform Databricks API call
  auth    Authentication related commands
  completion    Generate the autocompletion script for the specified shell
  configure    Configure authentication
  help    Help about any command
  labs    Manage Databricks Labs installations
  version    Retrieve information about the current version of this CLI

Flags:
      --debug            enable debug logging
  -h, --help             help for databricks
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
  -v, --version          version for databricks

Use "databricks [command] --help" for more information about a command.
@@ -0,0 +1 @@
$CLI
@@ -0,0 +1 @@
rm -fr .databricks .gitignore
@@ -0,0 +1,36 @@
# Prevent CLI from downloading terraform in each test:
export DATABRICKS_TF_EXEC_PATH=/tmp/

errcode() {
    # Temporarily disable 'set -e' to prevent the script from exiting on error
    set +e
    # Execute the provided command with all arguments
    "$@"
    local exit_code=$?
    # Re-enable 'set -e' if it was previously set
    set -e
    >&2 printf "\nExit code: $exit_code\n"
}

trace() {
    >&2 printf "\n>>> %s\n" "$*"

    if [[ "$1" == *"="* ]]; then
        # If the first argument contains '=', collect all env vars
        local env_vars=()
        while [[ "$1" == *"="* ]]; do
            env_vars+=("$1")
            shift
        done
        # Export environment variables in a subshell and execute the command
        (
            export "${env_vars[@]}"
            "$@"
        )
    else
        # Execute the command normally
        "$@"
    fi

    return $?
}
@@ -0,0 +1,129 @@
package acceptance_test

import (
	"encoding/json"
	"net"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/iam"
	"github.com/databricks/databricks-sdk-go/service/workspace"
)

type TestServer struct {
	*httptest.Server
	Mux  *http.ServeMux
	Port int
}

type HandlerFunc func(r *http.Request) (any, error)

func NewTestServer() *TestServer {
	mux := http.NewServeMux()
	server := httptest.NewServer(mux)
	port := server.Listener.Addr().(*net.TCPAddr).Port

	return &TestServer{
		Server: server,
		Mux:    mux,
		Port:   port,
	}
}

func (s *TestServer) Handle(pattern string, handler HandlerFunc) {
	s.Mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
		resp, err := handler(r)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		w.Header().Set("Content-Type", "application/json")

		var respBytes []byte

		respString, ok := resp.(string)
		if ok {
			respBytes = []byte(respString)
		} else {
			respBytes, err = json.MarshalIndent(resp, "", " ")
			if err != nil {
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}
		}

		if _, err := w.Write(respBytes); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	})
}

func StartServer(t *testing.T) *TestServer {
	server := NewTestServer()
	t.Cleanup(func() {
		server.Close()
	})
	return server
}

func AddHandlers(server *TestServer) {
	server.Handle("/api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
		return compute.ListPoliciesResponse{
			Policies: []compute.Policy{
				{
					PolicyId: "5678",
					Name:     "wrong-cluster-policy",
				},
				{
					PolicyId: "9876",
					Name:     "some-test-cluster-policy",
				},
			},
		}, nil
	})

	server.Handle("/api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
		return compute.ListInstancePools{
			InstancePools: []compute.InstancePoolAndStats{
				{
					InstancePoolName: "some-test-instance-pool",
					InstancePoolId:   "1234",
				},
			},
		}, nil
	})

	server.Handle("/api/2.1/clusters/list", func(r *http.Request) (any, error) {
		return compute.ListClustersResponse{
			Clusters: []compute.ClusterDetails{
				{
					ClusterName: "some-test-cluster",
					ClusterId:   "4321",
				},
				{
					ClusterName: "some-other-cluster",
					ClusterId:   "9876",
				},
			},
		}, nil
	})

	server.Handle("/api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
		return iam.User{
			UserName: "tester@databricks.com",
		}, nil
	})

	server.Handle("/api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
		return workspace.ObjectInfo{
			ObjectId:   1001,
			ObjectType: "DIRECTORY",
			Path:       "",
			ResourceId: "1001",
		}, nil
	})
}
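The file above is a plain httptest wrapper with canned Databricks API responses. A hedged sketch of how an acceptance test might use it; pointing the CLI at the stub via DATABRICKS_HOST/DATABRICKS_TOKEN is an assumption of this sketch, not something the file above defines:

```go
package acceptance_test

import (
	"net/http"
	"testing"
)

// TestExampleUsage is a hypothetical consumer of the helpers above: it starts
// the stub server, registers the default handlers, adds one extra endpoint,
// and exposes the server URL to anything launched by the test.
func TestExampleUsage(t *testing.T) {
	server := StartServer(t)
	AddHandlers(server)

	// Extra, test-specific endpoint (illustrative).
	server.Handle("/api/2.0/clusters/get", func(r *http.Request) (any, error) {
		return map[string]string{"cluster_id": r.URL.Query().Get("cluster_id")}, nil
	})

	// Assumption: the CLI under test reads these variables to pick its target.
	t.Setenv("DATABRICKS_HOST", server.URL)
	t.Setenv("DATABRICKS_TOKEN", "dapi-test")
	// ... run the CLI against server.URL here ...
}
```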
@@ -2,7 +2,6 @@ package artifacts
 
 import (
 	"context"
-	"fmt"
 	"path/filepath"
 	"testing"
 
@@ -88,16 +87,16 @@ func TestExpandGlobs_InvalidPattern(t *testing.T) {
 	))
 
 	assert.Len(t, diags, 4)
-	assert.Equal(t, fmt.Sprintf("%s: syntax error in pattern", filepath.Clean("a[.txt")), diags[0].Summary)
+	assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary)
 	assert.Equal(t, filepath.Join(tmpDir, "databricks.yml"), diags[0].Locations[0].File)
 	assert.Equal(t, "artifacts.test.files[0].source", diags[0].Paths[0].String())
-	assert.Equal(t, fmt.Sprintf("%s: syntax error in pattern", filepath.Clean("a[.txt")), diags[1].Summary)
+	assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[1].Summary)
 	assert.Equal(t, filepath.Join(tmpDir, "databricks.yml"), diags[1].Locations[0].File)
 	assert.Equal(t, "artifacts.test.files[1].source", diags[1].Paths[0].String())
-	assert.Equal(t, fmt.Sprintf("%s: syntax error in pattern", filepath.Clean("../a[.txt")), diags[2].Summary)
+	assert.Equal(t, filepath.Clean("../a[.txt")+": syntax error in pattern", diags[2].Summary)
 	assert.Equal(t, filepath.Join(tmpDir, "databricks.yml"), diags[2].Locations[0].File)
 	assert.Equal(t, "artifacts.test.files[2].source", diags[2].Paths[0].String())
-	assert.Equal(t, fmt.Sprintf("%s: syntax error in pattern", filepath.Clean("subdir/a[.txt")), diags[3].Summary)
+	assert.Equal(t, filepath.Clean("subdir/a[.txt")+": syntax error in pattern", diags[3].Summary)
 	assert.Equal(t, filepath.Join(tmpDir, "databricks.yml"), diags[3].Locations[0].File)
 	assert.Equal(t, "artifacts.test.files[3].source", diags[3].Paths[0].String())
 }
@@ -32,7 +32,7 @@ func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	//)
 
 	py := python.GetExecutable()
-	artifact.BuildCommand = fmt.Sprintf(`%s setup.py bdist_wheel`, py)
+	artifact.BuildCommand = py + " setup.py bdist_wheel"
 
 	return nil
 }
@@ -8,6 +8,7 @@ package bundle
 
 import (
 	"context"
+	"errors"
 	"fmt"
 	"os"
 	"path/filepath"
@@ -234,7 +235,7 @@ func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) {
 // we call into from this bundle context.
 func (b *Bundle) AuthEnv() (map[string]string, error) {
 	if b.client == nil {
-		return nil, fmt.Errorf("workspace client not initialized yet")
+		return nil, errors.New("workspace client not initialized yet")
 	}
 
 	cfg := b.client.Config
@@ -2,7 +2,7 @@ package config
 
 import (
 	"context"
-	"fmt"
+	"errors"
 
 	"github.com/databricks/cli/libs/exec"
 )
@@ -37,7 +37,7 @@ type Artifact struct {
 
 func (a *Artifact) Build(ctx context.Context) ([]byte, error) {
 	if a.BuildCommand == "" {
-		return nil, fmt.Errorf("no build property defined")
+		return nil, errors.New("no build property defined")
 	}
 
 	var e *exec.Executor
@@ -27,9 +27,33 @@ type Experimental struct {
 	// PyDABs determines whether to load the 'databricks-pydabs' package.
 	//
 	// PyDABs allows to define bundle configuration using Python.
+	// PyDABs is deprecated; use Python instead.
 	PyDABs PyDABs `json:"pydabs,omitempty"`
 
+	// Python configures loading of Python code defined with 'databricks-bundles' package.
+	Python Python `json:"python,omitempty"`
 }
 
+type Python struct {
+	// Resources contains a list of fully qualified function paths to load resources
+	// defined in Python code.
+	//
+	// Example: ["my_project.resources:load_resources"]
+	Resources []string `json:"resources"`
+
+	// Mutators contains a list of fully qualified function paths to mutator functions.
+	//
+	// Example: ["my_project.mutators:add_default_cluster"]
+	Mutators []string `json:"mutators"`
+
+	// VEnvPath is path to the virtual environment.
+	//
+	// If enabled, Python code will execute within this environment. If disabled,
+	// it defaults to using the Python interpreter available in the current shell.
+	VEnvPath string `json:"venv_path,omitempty"`
+}
+
+// PyDABs is deprecated; use Python instead.
 type PyDABs struct {
 	// Enabled is a flag to enable the feature.
 	Enabled bool `json:"enabled,omitempty"`
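Assuming the YAML-to-struct mapping implied by the json tags above, a minimal Go sketch of the new section populated in a test, reusing the example values from the field comments (illustrative only):

```go
package config_test

import "github.com/databricks/cli/bundle/config"

// exampleExperimental mirrors the field comments above; the function paths and
// venv path are illustrative values, not required names.
var exampleExperimental = config.Experimental{
	Python: config.Python{
		Resources: []string{"my_project.resources:load_resources"},
		Mutators:  []string{"my_project.mutators:add_default_cluster"},
		VEnvPath:  ".venv",
	},
}
```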
@@ -9,7 +9,6 @@ import (
 
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/libs/dbr"
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/textutil"
@@ -222,27 +221,6 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
 		dashboard.DisplayName = prefix + dashboard.DisplayName
 	}
 
-	if config.IsExplicitlyEnabled((b.Config.Presets.SourceLinkedDeployment)) {
-		isDatabricksWorkspace := dbr.RunsOnRuntime(ctx) && strings.HasPrefix(b.SyncRootPath, "/Workspace/")
-		if !isDatabricksWorkspace {
-			target := b.Config.Bundle.Target
-			path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("presets"), dyn.Key("source_linked_deployment"))
-			diags = diags.Append(
-				diag.Diagnostic{
-					Severity:  diag.Warning,
-					Summary:   "source-linked deployment is available only in the Databricks Workspace",
-					Paths: []dyn.Path{
-						path,
-					},
-					Locations: b.Config.GetLocations(path[2:].String()),
-				},
-			)
-
-			disabled := false
-			b.Config.Presets.SourceLinkedDeployment = &disabled
-		}
-	}
-
 	return diags
 }
@@ -2,16 +2,12 @@ package mutator_test
 
 import (
 	"context"
-	"runtime"
 	"testing"
 
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/config/resources"
-	"github.com/databricks/cli/bundle/internal/bundletest"
-	"github.com/databricks/cli/libs/dbr"
-	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/stretchr/testify/require"
@@ -398,87 +394,3 @@ func TestApplyPresetsResourceNotDefined(t *testing.T) {
 		})
 	}
 }
-
-func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
-	if runtime.GOOS == "windows" {
-		t.Skip("this test is not applicable on Windows because source-linked mode works only in the Databricks Workspace")
-	}
-
-	testContext := context.Background()
-	enabled := true
-	disabled := false
-	workspacePath := "/Workspace/user.name@company.com"
-
-	tests := []struct {
-		bundlePath      string
-		ctx             context.Context
-		name            string
-		initialValue    *bool
-		expectedValue   *bool
-		expectedWarning string
-	}{
-		{
-			name:          "preset enabled, bundle in Workspace, databricks runtime",
-			bundlePath:    workspacePath,
-			ctx:           dbr.MockRuntime(testContext, true),
-			initialValue:  &enabled,
-			expectedValue: &enabled,
-		},
-		{
-			name:            "preset enabled, bundle not in Workspace, databricks runtime",
-			bundlePath:      "/Users/user.name@company.com",
-			ctx:             dbr.MockRuntime(testContext, true),
-			initialValue:    &enabled,
-			expectedValue:   &disabled,
-			expectedWarning: "source-linked deployment is available only in the Databricks Workspace",
-		},
-		{
-			name:            "preset enabled, bundle in Workspace, not databricks runtime",
-			bundlePath:      workspacePath,
-			ctx:             dbr.MockRuntime(testContext, false),
-			initialValue:    &enabled,
-			expectedValue:   &disabled,
-			expectedWarning: "source-linked deployment is available only in the Databricks Workspace",
-		},
-		{
-			name:          "preset disabled, bundle in Workspace, databricks runtime",
-			bundlePath:    workspacePath,
-			ctx:           dbr.MockRuntime(testContext, true),
-			initialValue:  &disabled,
-			expectedValue: &disabled,
-		},
-		{
-			name:          "preset nil, bundle in Workspace, databricks runtime",
-			bundlePath:    workspacePath,
-			ctx:           dbr.MockRuntime(testContext, true),
-			initialValue:  nil,
-			expectedValue: nil,
-		},
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			b := &bundle.Bundle{
-				SyncRootPath: tt.bundlePath,
-				Config: config.Root{
-					Presets: config.Presets{
-						SourceLinkedDeployment: tt.initialValue,
-					},
-				},
-			}
-
-			bundletest.SetLocation(b, "presets.source_linked_deployment", []dyn.Location{{File: "databricks.yml"}})
-			diags := bundle.Apply(tt.ctx, b, mutator.ApplyPresets())
-			if diags.HasError() {
-				t.Fatalf("unexpected error: %v", diags)
-			}
-
-			if tt.expectedWarning != "" {
-				require.Equal(t, tt.expectedWarning, diags[0].Summary)
-				require.NotEmpty(t, diags[0].Locations)
-			}
-
-			require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment)
-		})
-	}
-}
@@ -0,0 +1,75 @@
package mutator

import (
	"context"
	"strings"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/libs/dbr"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
)

type applySourceLinkedDeploymentPreset struct{}

// Apply source-linked deployment preset
func ApplySourceLinkedDeploymentPreset() *applySourceLinkedDeploymentPreset {
	return &applySourceLinkedDeploymentPreset{}
}

func (m *applySourceLinkedDeploymentPreset) Name() string {
	return "ApplySourceLinkedDeploymentPreset"
}

func (m *applySourceLinkedDeploymentPreset) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	if config.IsExplicitlyDisabled(b.Config.Presets.SourceLinkedDeployment) {
		return nil
	}

	var diags diag.Diagnostics
	isDatabricksWorkspace := dbr.RunsOnRuntime(ctx) && strings.HasPrefix(b.SyncRootPath, "/Workspace/")
	target := b.Config.Bundle.Target

	if config.IsExplicitlyEnabled((b.Config.Presets.SourceLinkedDeployment)) {
		if !isDatabricksWorkspace {
			path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("presets"), dyn.Key("source_linked_deployment"))
			diags = diags.Append(
				diag.Diagnostic{
					Severity: diag.Warning,
					Summary:  "source-linked deployment is available only in the Databricks Workspace",
					Paths: []dyn.Path{
						path,
					},
					Locations: b.Config.GetLocations(path[2:].String()),
				},
			)

			disabled := false
			b.Config.Presets.SourceLinkedDeployment = &disabled
			return diags
		}
	}

	if isDatabricksWorkspace && b.Config.Bundle.Mode == config.Development {
		enabled := true
		b.Config.Presets.SourceLinkedDeployment = &enabled
	}

	if b.Config.Workspace.FilePath != "" && config.IsExplicitlyEnabled(b.Config.Presets.SourceLinkedDeployment) {
		path := dyn.NewPath(dyn.Key("targets"), dyn.Key(target), dyn.Key("workspace"), dyn.Key("file_path"))

		diags = diags.Append(
			diag.Diagnostic{
				Severity: diag.Warning,
				Summary:  "workspace.file_path setting will be ignored in source-linked deployment mode",
				Paths: []dyn.Path{
					path[2:],
				},
				Locations: b.Config.GetLocations(path[2:].String()),
			},
		)
	}

	return diags
}
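For orientation before the full test below, a minimal sketch of applying this mutator on its own; the empty bundle simply exercises the no-op path, and the real wiring into the CLI's mutator pipeline is not shown here:

```go
package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/stretchr/testify/require"
)

// TestApplyPresetSketch applies the preset mutator like any other bundle
// mutator. With no preset set and no workspace path, it should emit nothing.
func TestApplyPresetSketch(t *testing.T) {
	b := &bundle.Bundle{}
	diags := bundle.Apply(context.Background(), b, mutator.ApplySourceLinkedDeploymentPreset())
	require.False(t, diags.HasError())
}
```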
@@ -0,0 +1,122 @@
package mutator_test

import (
	"context"
	"runtime"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/internal/bundletest"
	"github.com/databricks/cli/libs/dbr"
	"github.com/databricks/cli/libs/dyn"
	"github.com/stretchr/testify/require"
)

func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("this test is not applicable on Windows because source-linked mode works only in the Databricks Workspace")
	}

	testContext := context.Background()
	enabled := true
	disabled := false
	workspacePath := "/Workspace/user.name@company.com"

	tests := []struct {
		name            string
		ctx             context.Context
		mutateBundle    func(b *bundle.Bundle)
		initialValue    *bool
		expectedValue   *bool
		expectedWarning string
	}{
		{
			name:          "preset enabled, bundle in Workspace, databricks runtime",
			ctx:           dbr.MockRuntime(testContext, true),
			initialValue:  &enabled,
			expectedValue: &enabled,
		},
		{
			name: "preset enabled, bundle not in Workspace, databricks runtime",
			ctx:  dbr.MockRuntime(testContext, true),
			mutateBundle: func(b *bundle.Bundle) {
				b.SyncRootPath = "/Users/user.name@company.com"
			},
			initialValue:    &enabled,
			expectedValue:   &disabled,
			expectedWarning: "source-linked deployment is available only in the Databricks Workspace",
		},
		{
			name:            "preset enabled, bundle in Workspace, not databricks runtime",
			ctx:             dbr.MockRuntime(testContext, false),
			initialValue:    &enabled,
			expectedValue:   &disabled,
			expectedWarning: "source-linked deployment is available only in the Databricks Workspace",
		},
		{
			name:          "preset disabled, bundle in Workspace, databricks runtime",
			ctx:           dbr.MockRuntime(testContext, true),
			initialValue:  &disabled,
			expectedValue: &disabled,
		},
		{
			name:          "preset nil, bundle in Workspace, databricks runtime",
			ctx:           dbr.MockRuntime(testContext, true),
			initialValue:  nil,
			expectedValue: nil,
		},
		{
			name: "preset nil, dev mode true, bundle in Workspace, databricks runtime",
			ctx:  dbr.MockRuntime(testContext, true),
			mutateBundle: func(b *bundle.Bundle) {
				b.Config.Bundle.Mode = config.Development
			},
			initialValue:  nil,
			expectedValue: &enabled,
		},
		{
			name: "preset enabled, workspace.file_path is defined by user",
			ctx:  dbr.MockRuntime(testContext, true),
			mutateBundle: func(b *bundle.Bundle) {
				b.Config.Workspace.FilePath = "file_path"
			},
			initialValue:    &enabled,
			expectedValue:   &enabled,
			expectedWarning: "workspace.file_path setting will be ignored in source-linked deployment mode",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			b := &bundle.Bundle{
				SyncRootPath: workspacePath,
				Config: config.Root{
					Presets: config.Presets{
						SourceLinkedDeployment: tt.initialValue,
					},
				},
			}

			if tt.mutateBundle != nil {
				tt.mutateBundle(b)
			}

			bundletest.SetLocation(b, "presets.source_linked_deployment", []dyn.Location{{File: "databricks.yml"}})
			bundletest.SetLocation(b, "workspace.file_path", []dyn.Location{{File: "databricks.yml"}})

			diags := bundle.Apply(tt.ctx, b, mutator.ApplySourceLinkedDeploymentPreset())
			if diags.HasError() {
				t.Fatalf("unexpected error: %v", diags)
			}

			if tt.expectedWarning != "" {
				require.Equal(t, tt.expectedWarning, diags[0].Summary)
				require.NotEmpty(t, diags[0].Locations)
			}

			require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment)
		})
	}
}
@@ -2,7 +2,6 @@ package mutator
 
 import (
 	"context"
-	"fmt"
 	"path"
 	"strings"
 
@@ -33,7 +32,7 @@ func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag.
 	}
 
 	if strings.HasPrefix(root, "~/") {
-		home := fmt.Sprintf("/Workspace/Users/%s", currentUser.UserName)
+		home := "/Workspace/Users/" + currentUser.UserName
 		b.Config.Workspace.RootPath = path.Join(home, root[2:])
 	}
 
@@ -55,7 +55,7 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
 			}
 		}
 
-		return dyn.NewValue(fmt.Sprintf("/Workspace%s", path), v.Locations()), nil
+		return dyn.NewValue("/Workspace"+path, v.Locations()), nil
 	})
 	if err != nil {
 		return dyn.InvalidValue, err
@@ -6,7 +6,6 @@ import (
 
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
-	"github.com/databricks/cli/libs/dbr"
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/iamutil"
@@ -58,14 +57,6 @@ func transformDevelopmentMode(ctx context.Context, b *bundle.Bundle) {
 		t.TriggerPauseStatus = config.Paused
 	}
 
-	if !config.IsExplicitlyDisabled(t.SourceLinkedDeployment) {
-		isInWorkspace := strings.HasPrefix(b.SyncRootPath, "/Workspace/")
-		if isInWorkspace && dbr.RunsOnRuntime(ctx) {
-			enabled := true
-			t.SourceLinkedDeployment = &enabled
-		}
-	}
-
 	if !config.IsExplicitlyDisabled(t.PipelinesDevelopment) {
 		enabled := true
 		t.PipelinesDevelopment = &enabled
@@ -3,14 +3,12 @@ package mutator
 import (
 	"context"
 	"reflect"
-	"runtime"
 	"slices"
 	"testing"
 
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/resources"
-	"github.com/databricks/cli/libs/dbr"
 	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/tags"
 	"github.com/databricks/cli/libs/vfs"
@@ -540,32 +538,3 @@ func TestPipelinesDevelopmentDisabled(t *testing.T) {
 
 	assert.False(t, b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development)
 }
-
-func TestSourceLinkedDeploymentEnabled(t *testing.T) {
-	b, diags := processSourceLinkedBundle(t, true)
-	require.NoError(t, diags.Error())
-	assert.True(t, *b.Config.Presets.SourceLinkedDeployment)
-}
-
-func TestSourceLinkedDeploymentDisabled(t *testing.T) {
-	b, diags := processSourceLinkedBundle(t, false)
-	require.NoError(t, diags.Error())
-	assert.False(t, *b.Config.Presets.SourceLinkedDeployment)
-}
-
-func processSourceLinkedBundle(t *testing.T, presetEnabled bool) (*bundle.Bundle, diag.Diagnostics) {
-	if runtime.GOOS == "windows" {
-		t.Skip("this test is not applicable on Windows because source-linked mode works only in the Databricks Workspace")
-	}
-
-	b := mockBundle(config.Development)
-
-	workspacePath := "/Workspace/lennart@company.com/"
-	b.SyncRootPath = workspacePath
-	b.Config.Presets.SourceLinkedDeployment = &presetEnabled
-
-	ctx := dbr.MockRuntime(context.Background(), true)
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(ctx, b, m)
-	return b, diags
-}
@ -9,6 +9,7 @@ import (
|
||||||
"io"
|
"io"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
"reflect"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/databricks/databricks-sdk-go/logger"
|
"github.com/databricks/databricks-sdk-go/logger"
|
||||||
|
@ -40,6 +41,8 @@ const (
|
||||||
// We also open for possibility of appending other sections of bundle configuration,
|
// We also open for possibility of appending other sections of bundle configuration,
|
||||||
// for example, adding new variables. However, this is not supported yet, and CLI rejects
|
// for example, adding new variables. However, this is not supported yet, and CLI rejects
|
||||||
// such changes.
|
// such changes.
|
||||||
|
//
|
||||||
|
// Deprecated, left for backward-compatibility with PyDABs.
|
||||||
PythonMutatorPhaseLoad phase = "load"
|
PythonMutatorPhaseLoad phase = "load"
|
||||||
|
|
||||||
// PythonMutatorPhaseInit is the phase after bundle configuration was loaded, and
|
// PythonMutatorPhaseInit is the phase after bundle configuration was loaded, and
|
||||||
|
@ -59,7 +62,46 @@ const (
|
||||||
// PyDABs can output YAML containing references to variables, and CLI should resolve them.
|
// PyDABs can output YAML containing references to variables, and CLI should resolve them.
|
||||||
//
|
//
|
||||||
// Existing resources can't be removed, and CLI rejects such changes.
|
// Existing resources can't be removed, and CLI rejects such changes.
|
||||||
|
//
|
||||||
|
// Deprecated, left for backward-compatibility with PyDABs.
|
||||||
PythonMutatorPhaseInit phase = "init"
|
PythonMutatorPhaseInit phase = "init"
|
||||||
|
|
||||||
|
// PythonMutatorPhaseLoadResources is the phase in which YAML configuration was loaded.
|
||||||
|
//
|
||||||
|
// At this stage, we execute Python code to load resources defined in Python.
|
||||||
|
//
|
||||||
|
// During this process, Python code can access:
|
||||||
|
// - selected deployment target
|
||||||
|
// - bundle variable values
|
||||||
|
// - variables provided through CLI argument or environment variables
|
||||||
|
//
|
||||||
|
// The following is not available:
|
||||||
|
// - variables referencing other variables are in unresolved format
|
||||||
|
//
|
||||||
|
// Python code can output YAML referencing variables, and CLI should resolve them.
|
||||||
|
//
|
||||||
|
// Existing resources can't be removed or modified, and CLI rejects such changes.
|
||||||
|
// While it's called 'load_resources', this phase is executed in 'init' phase of mutator pipeline.
|
||||||
|
PythonMutatorPhaseLoadResources phase = "load_resources"
|
||||||
|
|
||||||
|
// PythonMutatorPhaseApplyMutators is the phase in which resources defined in YAML or Python
|
||||||
|
// are already loaded.
|
||||||
|
//
|
||||||
|
// At this stage, we execute Python code to mutate resources defined in YAML or Python.
|
||||||
|
//
|
||||||
|
// During this process, Python code can access:
|
||||||
|
// - selected deployment target
|
||||||
|
// - bundle variable values
|
||||||
|
// - variables provided through CLI argument or environment variables
|
||||||
|
//
|
||||||
|
// The following is not available:
|
||||||
|
// - variables referencing other variables are in unresolved format
|
||||||
|
//
|
||||||
|
// Python code can output YAML referencing variables, and CLI should resolve them.
|
||||||
|
//
|
||||||
|
// Resources can't be added or removed, and CLI rejects such changes. Python code is
|
||||||
|
// allowed to modify existing resources, but not other parts of bundle configuration.
|
||||||
|
PythonMutatorPhaseApplyMutators phase = "apply_mutators"
|
||||||
)
|
)
|
||||||
|
|
||||||
type pythonMutator struct {
|
type pythonMutator struct {
|
||||||
|
@ -76,18 +118,64 @@ func (m *pythonMutator) Name() string {
|
||||||
return fmt.Sprintf("PythonMutator(%s)", m.phase)
|
return fmt.Sprintf("PythonMutator(%s)", m.phase)
|
||||||
}
|
}
|
||||||
|
|
||||||
func getExperimental(b *bundle.Bundle) config.Experimental {
|
// opts is a common structure for deprecated PyDABs and upcoming Python
|
||||||
if b.Config.Experimental == nil {
|
// configuration sections
|
||||||
return config.Experimental{}
|
type opts struct {
|
||||||
|
enabled bool
|
||||||
|
|
||||||
|
venvPath string
|
||||||
}
|
}
|
||||||
|
|
||||||
return *b.Config.Experimental
|
// getOpts adapts deprecated PyDABs and upcoming Python configuration
|
||||||
|
// into a common structure.
|
||||||
|
func getOpts(b *bundle.Bundle, phase phase) (opts, error) {
|
||||||
|
experimental := b.Config.Experimental
|
||||||
|
if experimental == nil {
|
||||||
|
return opts{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// using reflect.DeepEquals in case we add more fields
|
||||||
|
pydabsEnabled := !reflect.DeepEqual(experimental.PyDABs, config.PyDABs{})
|
||||||
|
pythonEnabled := !reflect.DeepEqual(experimental.Python, config.Python{})
|
||||||
|
|
||||||
|
if pydabsEnabled && pythonEnabled {
|
||||||
|
return opts{}, errors.New("both experimental/pydabs and experimental/python are enabled, only one can be enabled")
|
||||||
|
} else if pydabsEnabled {
|
||||||
|
if !experimental.PyDABs.Enabled {
|
||||||
|
return opts{}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// don't execute for phases for 'python' section
|
||||||
|
if phase == PythonMutatorPhaseInit || phase == PythonMutatorPhaseLoad {
|
||||||
|
return opts{
|
||||||
|
enabled: true,
|
||||||
|
venvPath: experimental.PyDABs.VEnvPath,
|
||||||
|
}, nil
|
||||||
|
} else {
|
||||||
|
return opts{}, nil
|
||||||
|
}
|
||||||
|
} else if pythonEnabled {
|
||||||
|
// don't execute for phases for 'pydabs' section
|
||||||
|
if phase == PythonMutatorPhaseLoadResources || phase == PythonMutatorPhaseApplyMutators {
|
||||||
|
return opts{
|
||||||
|
enabled: true,
|
||||||
|
venvPath: experimental.Python.VEnvPath,
|
||||||
|
}, nil
|
||||||
|
} else {
|
||||||
|
return opts{}, nil
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
return opts{}, nil
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
|
func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
|
||||||
experimental := getExperimental(b)
|
opts, err := getOpts(b, m.phase)
|
||||||
|
if err != nil {
|
||||||
|
return diag.Errorf("failed to apply python mutator: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
if !experimental.PyDABs.Enabled {
|
if !opts.enabled {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -95,8 +183,8 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
|
||||||
var mutateDiags diag.Diagnostics
|
var mutateDiags diag.Diagnostics
|
||||||
mutateDiagsHasError := errors.New("unexpected error")
|
mutateDiagsHasError := errors.New("unexpected error")
|
||||||
|
|
||||||
err := b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) {
|
err = b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) {
|
||||||
pythonPath, err := detectExecutable(ctx, experimental.PyDABs.VEnvPath)
|
pythonPath, err := detectExecutable(ctx, opts.venvPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return dyn.InvalidValue, fmt.Errorf("failed to get Python interpreter path: %w", err)
|
return dyn.InvalidValue, fmt.Errorf("failed to get Python interpreter path: %w", err)
|
||||||
}
|
}
|
||||||
|
@ -137,7 +225,7 @@ func createCacheDir(ctx context.Context) (string, error) {
|
||||||
// support the same env variable as in b.CacheDir
|
// support the same env variable as in b.CacheDir
|
||||||
if tempDir, exists := env.TempDir(ctx); exists {
|
if tempDir, exists := env.TempDir(ctx); exists {
|
||||||
// use 'default' as target name
|
// use 'default' as target name
|
||||||
cacheDir := filepath.Join(tempDir, "default", "pydabs")
|
cacheDir := filepath.Join(tempDir, "default", "python")
|
||||||
|
|
||||||
err := os.MkdirAll(cacheDir, 0o700)
|
err := os.MkdirAll(cacheDir, 0o700)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -147,7 +235,7 @@ func createCacheDir(ctx context.Context) (string, error) {
|
||||||
return cacheDir, nil
|
return cacheDir, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return os.MkdirTemp("", "-pydabs")
|
return os.MkdirTemp("", "-python")
|
||||||
}
|
}
|
||||||
|
|
||||||
func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
|
func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
|
||||||
|
@ -203,7 +291,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath
|
||||||
}
|
}
|
||||||
|
|
||||||
// process can fail without reporting errors in diagnostics file or creating it, for instance,
|
// process can fail without reporting errors in diagnostics file or creating it, for instance,
|
||||||
// venv doesn't have PyDABs library installed
|
// venv doesn't have 'databricks-bundles' library installed
|
||||||
if processErr != nil {
|
if processErr != nil {
|
||||||
diagnostic := diag.Diagnostic{
|
diagnostic := diag.Diagnostic{
|
||||||
Severity: diag.Error,
|
Severity: diag.Error,
|
||||||
|
@ -226,16 +314,15 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath
|
||||||
return output, pythonDiagnostics
|
return output, pythonDiagnostics
|
||||||
}
|
}
|
||||||
|
|
||||||
const installExplanation = `If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
|
const pythonInstallExplanation = `Ensure that 'databricks-bundles' is installed in Python environment:
|
||||||
and that the wheel is installed in the Python environment:
|
|
||||||
|
|
||||||
$ .venv/bin/pip install -e .
|
$ .venv/bin/pip install databricks-bundles
|
||||||
|
|
||||||
If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
|
If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
|
||||||
or activate the environment before running CLI commands:
|
or activate the environment before running CLI commands:
|
||||||
|
|
||||||
experimental:
|
experimental:
|
||||||
pydabs:
|
python:
|
||||||
venv_path: .venv
|
venv_path: .venv
|
||||||
`
|
`
|
||||||
|
|
||||||
|
@ -245,9 +332,9 @@ or activate the environment before running CLI commands:
|
||||||
func explainProcessErr(stderr string) string {
|
func explainProcessErr(stderr string) string {
|
||||||
// implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
|
// implemented in cpython/Lib/runpy.py and portable across Python 3.x, including pypy
|
||||||
if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
|
if strings.Contains(stderr, "Error while finding module specification for 'databricks.bundles.build'") {
|
||||||
summary := color.CyanString("Explanation: ") + "'databricks-pydabs' library is not installed in the Python environment.\n"
|
summary := color.CyanString("Explanation: ") + "'databricks-bundles' library is not installed in the Python environment.\n"
|
||||||
|
|
||||||
return stderr + "\n" + summary + "\n" + installExplanation
|
return stderr + "\n" + summary + "\n" + pythonInstallExplanation
|
||||||
}
|
}
|
||||||
|
|
||||||
return stderr
|
return stderr
|
||||||
|
@ -277,10 +364,10 @@ func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
|
||||||
//
|
//
|
||||||
// virtualPath has to stay in rootPath, because locations outside root path are not allowed:
|
// virtualPath has to stay in rootPath, because locations outside root path are not allowed:
|
||||||
//
|
//
|
||||||
// Error: path /var/folders/.../pydabs/dist/*.whl is not contained in bundle root path
|
// Error: path /var/folders/.../python/dist/*.whl is not contained in bundle root path
|
||||||
//
|
//
|
||||||
// for that, we pass virtualPath instead of outputPath as file location
|
// for that, we pass virtualPath instead of outputPath as file location
|
||||||
virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_pydabs__.yml"))
|
virtualPath, err := filepath.Abs(filepath.Join(rootPath, "__generated_by_python__.yml"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
|
return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to get absolute path: %w", err))
|
||||||
}
|
}
|
||||||
|
@@ -334,19 +421,23 @@ func loadDiagnosticsFile(path string) (diag.Diagnostics, error) {
func createOverrideVisitor(ctx context.Context, phase phase) (merge.OverrideVisitor, error) {
switch phase {
case PythonMutatorPhaseLoad:
- return createLoadOverrideVisitor(ctx), nil
+ return createLoadResourcesOverrideVisitor(ctx), nil
case PythonMutatorPhaseInit:
- return createInitOverrideVisitor(ctx), nil
+ return createInitOverrideVisitor(ctx, insertResourceModeAllow), nil
+ case PythonMutatorPhaseLoadResources:
+ return createLoadResourcesOverrideVisitor(ctx), nil
+ case PythonMutatorPhaseApplyMutators:
+ return createInitOverrideVisitor(ctx, insertResourceModeDisallow), nil
default:
return merge.OverrideVisitor{}, fmt.Errorf("unknown phase: %s", phase)
}
}

- // createLoadOverrideVisitor creates an override visitor for the load phase.
+ // createLoadResourcesOverrideVisitor creates an override visitor for the load_resources phase.
//
- // During load, it's only possible to create new resources, and not modify or
+ // During load_resources, it's only possible to create new resources, and not modify or
// delete existing ones.
- func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
+ func createLoadResourcesOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
resourcesPath := dyn.NewPath(dyn.Key("resources"))
jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))

@@ -385,11 +476,21 @@ func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
}
}

+ // insertResourceMode controls whether createInitOverrideVisitor allows or disallows inserting new resources.
+ type insertResourceMode int
+
+ const (
+ insertResourceModeDisallow insertResourceMode = iota
+ insertResourceModeAllow insertResourceMode = iota
+ )
+
// createInitOverrideVisitor creates an override visitor for the init phase.
//
// During the init phase it's possible to create new resources, modify existing
// resources, but not delete existing resources.
- func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
+ //
+ // If mode is insertResourceModeDisallow, it matching expected behaviour of apply_mutators
+ func createInitOverrideVisitor(ctx context.Context, mode insertResourceMode) merge.OverrideVisitor {
resourcesPath := dyn.NewPath(dyn.Key("resources"))
jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))

@@ -424,6 +525,11 @@ func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (insert)", valuePath.String())
}

+ insertResource := len(valuePath) == len(jobsPath)+1
+ if mode == insertResourceModeDisallow && insertResource {
+ return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (insert)", valuePath.String())
+ }
+
log.Debugf(ctx, "Insert value at %q", valuePath.String())

return right, nil

@@ -441,9 +547,9 @@ func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
}

func isOmitemptyDelete(left dyn.Value) bool {
- // PyDABs can omit empty sequences/mappings in output, because we don't track them as optional,
+ // Python output can omit empty sequences/mappings, because we don't track them as optional,
// there is no semantic difference between empty and missing, so we keep them as they were before
- // PyDABs deleted them.
+ // Python mutator deleted them.

switch left.Kind() {
case dyn.KindMap:
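Editor's note: the new insertResourceMode changes exactly one check: in disallow mode an insert directly under resources.jobs (a brand-new job) is rejected, while deeper inserts are still allowed. A standalone sketch of that length-based gate, using string slices in place of the CLI's dyn.Path (the helper below is hypothetical, not from this commit):

package main

import (
	"fmt"
	"strings"
)

type insertResourceMode int

const (
	insertResourceModeDisallow insertResourceMode = iota
	insertResourceModeAllow
)

// allowInsert reports whether inserting a value at valuePath is permitted.
// A path exactly one segment longer than "resources.jobs" addresses a whole
// job, which disallow mode (apply_mutators) must not create.
func allowInsert(mode insertResourceMode, valuePath []string) bool {
	jobsPath := []string{"resources", "jobs"}
	insertResource := len(valuePath) == len(jobsPath)+1
	return !(mode == insertResourceModeDisallow && insertResource)
}

func main() {
	p := strings.Split("resources.jobs.job0", ".")
	fmt.Println(allowInsert(insertResourceModeAllow, p))    // true
	fmt.Println(allowInsert(insertResourceModeDisallow, p)) // false
}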
@@ -2,6 +2,7 @@ package python

import (
"context"
+ "errors"
"fmt"
"os"
"os/exec"

@@ -39,13 +40,25 @@ func TestPythonMutator_Name_init(t *testing.T) {
assert.Equal(t, "PythonMutator(init)", mutator.Name())
}

- func TestPythonMutator_load(t *testing.T) {
+ func TestPythonMutator_Name_loadResources(t *testing.T) {
+ mutator := PythonMutator(PythonMutatorPhaseLoadResources)
+
+ assert.Equal(t, "PythonMutator(load_resources)", mutator.Name())
+ }
+
+ func TestPythonMutator_Name_applyMutators(t *testing.T) {
+ mutator := PythonMutator(PythonMutatorPhaseApplyMutators)
+
+ assert.Equal(t, "PythonMutator(apply_mutators)", mutator.Name())
+ }
+
+ func TestPythonMutator_loadResources(t *testing.T) {
withFakeVEnv(t, ".venv")

b := loadYaml("databricks.yml", `
experimental:
- pydabs:
+ python:
- enabled: true
+ resources: ["resources:load_resources"]
venv_path: .venv
resources:
jobs:

@@ -59,12 +72,12 @@ func TestPythonMutator_load(t *testing.T) {
"-m",
"databricks.bundles.build",
"--phase",
- "load",
+ "load_resources",
},
`{
"experimental": {
- "pydabs": {
+ "python": {
- "enabled": true,
+ "resources": ["resources:load_resources"],
"venv_path": ".venv"
}
},

@@ -82,7 +95,7 @@ func TestPythonMutator_load(t *testing.T) {
`{"severity": "warning", "summary": "job doesn't have any tasks", "location": {"file": "src/examples/file.py", "line": 10, "column": 5}}`,
)

- mutator := PythonMutator(PythonMutatorPhaseLoad)
+ mutator := PythonMutator(PythonMutatorPhaseLoadResources)
diags := bundle.Apply(ctx, b, mutator)

assert.NoError(t, diags.Error())

@@ -108,13 +121,12 @@ func TestPythonMutator_load(t *testing.T) {
}, diags[0].Locations)
}

- func TestPythonMutator_load_disallowed(t *testing.T) {
+ func TestPythonMutator_loadResources_disallowed(t *testing.T) {
withFakeVEnv(t, ".venv")

b := loadYaml("databricks.yml", `
experimental:
- pydabs:
+ python:
- enabled: true
+ resources: ["resources:load_resources"]
venv_path: .venv
resources:
jobs:

@@ -128,12 +140,12 @@ func TestPythonMutator_load_disallowed(t *testing.T) {
"-m",
"databricks.bundles.build",
"--phase",
- "load",
+ "load_resources",
},
`{
"experimental": {
- "pydabs": {
+ "python": {
- "enabled": true,
+ "resources": ["resources:load_resources"],
"venv_path": ".venv"
}
},

@@ -147,20 +159,20 @@ func TestPythonMutator_load_disallowed(t *testing.T) {
}
}`, "")

- mutator := PythonMutator(PythonMutatorPhaseLoad)
+ mutator := PythonMutator(PythonMutatorPhaseLoadResources)
diag := bundle.Apply(ctx, b, mutator)

assert.EqualError(t, diag.Error(), "unexpected change at \"resources.jobs.job0.description\" (insert)")
}

- func TestPythonMutator_init(t *testing.T) {
+ func TestPythonMutator_applyMutators(t *testing.T) {
withFakeVEnv(t, ".venv")

b := loadYaml("databricks.yml", `
experimental:
- pydabs:
+ python:
- enabled: true
venv_path: .venv
+ mutators:
+ - "mutators:add_description"
resources:
jobs:
job0:

@@ -173,13 +185,13 @@ func TestPythonMutator_init(t *testing.T) {
"-m",
"databricks.bundles.build",
"--phase",
- "init",
+ "apply_mutators",
},
`{
"experimental": {
- "pydabs": {
+ "python": {
- "enabled": true,
+ "venv_path": ".venv",
- "venv_path": ".venv"
+ "mutators": ["mutators:add_description"]
}
},
"resources": {

@@ -192,7 +204,7 @@ func TestPythonMutator_init(t *testing.T) {
}
}`, "")

- mutator := PythonMutator(PythonMutatorPhaseInit)
+ mutator := PythonMutator(PythonMutatorPhaseApplyMutators)
diag := bundle.Apply(ctx, b, mutator)

assert.NoError(t, diag.Error())

@@ -207,12 +219,12 @@ func TestPythonMutator_init(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, "databricks.yml", name.Location().File)

- // 'description' was updated by PyDABs and has location of generated file until
+ // 'description' was updated by Python code and has location of generated file until
// we implement source maps
description, err := dyn.GetByPath(v, dyn.MustPathFromString("resources.jobs.job0.description"))
require.NoError(t, err)

- expectedVirtualPath, err := filepath.Abs("__generated_by_pydabs__.yml")
+ expectedVirtualPath, err := filepath.Abs("__generated_by_python__.yml")
require.NoError(t, err)
assert.Equal(t, expectedVirtualPath, description.Location().File)

@@ -223,12 +235,12 @@ func TestPythonMutator_init(t *testing.T) {

func TestPythonMutator_badOutput(t *testing.T) {
withFakeVEnv(t, ".venv")

b := loadYaml("databricks.yml", `
experimental:
- pydabs:
+ python:
- enabled: true
venv_path: .venv
+ resources:
+ - "resources:load_resources"
resources:
jobs:
job0:

@@ -241,7 +253,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
"-m",
"databricks.bundles.build",
"--phase",
- "load",
+ "load_resources",
},
`{
"resources": {

@@ -253,7 +265,7 @@ func TestPythonMutator_badOutput(t *testing.T) {
}
}`, "")

- mutator := PythonMutator(PythonMutatorPhaseLoad)
+ mutator := PythonMutator(PythonMutatorPhaseLoadResources)
diag := bundle.Apply(ctx, b, mutator)

assert.EqualError(t, diag.Error(), "unknown field: unknown_property")

@@ -269,34 +281,63 @@ func TestPythonMutator_disabled(t *testing.T) {
assert.NoError(t, diag.Error())
}

- func TestPythonMutator_venvRequired(t *testing.T) {
- b := loadYaml("databricks.yml", `
- experimental:
- pydabs:
- enabled: true`)
-
- ctx := context.Background()
- mutator := PythonMutator(PythonMutatorPhaseLoad)
- diag := bundle.Apply(ctx, b, mutator)
-
- assert.Error(t, diag.Error(), "\"experimental.enable_pydabs\" is enabled, but \"experimental.venv.path\" is not set")
- }
-
func TestPythonMutator_venvNotFound(t *testing.T) {
expectedError := fmt.Sprintf("failed to get Python interpreter path: can't find %q, check if virtualenv is created", interpreterPath("bad_path"))

b := loadYaml("databricks.yml", `
experimental:
- pydabs:
+ python:
- enabled: true
+ venv_path: bad_path
- venv_path: bad_path`)
+ resources:
+ - "resources:load_resources"`)

- mutator := PythonMutator(PythonMutatorPhaseInit)
+ mutator := PythonMutator(PythonMutatorPhaseLoadResources)
diag := bundle.Apply(context.Background(), b, mutator)

assert.EqualError(t, diag.Error(), expectedError)
}

+ func TestGetOps_Python(t *testing.T) {
+ actual, err := getOpts(&bundle.Bundle{
+ Config: config.Root{
+ Experimental: &config.Experimental{
+ Python: config.Python{
+ VEnvPath: ".venv",
+ Resources: []string{
+ "resources:load_resources",
+ },
+ },
+ },
+ },
+ }, PythonMutatorPhaseLoadResources)
+
+ assert.NoError(t, err)
+ assert.Equal(t, opts{venvPath: ".venv", enabled: true}, actual)
+ }
+
+ func TestGetOps_PyDABs(t *testing.T) {
+ actual, err := getOpts(&bundle.Bundle{
+ Config: config.Root{
+ Experimental: &config.Experimental{
+ PyDABs: config.PyDABs{
+ VEnvPath: ".venv",
+ Enabled: true,
+ },
+ },
+ },
+ }, PythonMutatorPhaseInit)
+
+ assert.NoError(t, err)
+ assert.Equal(t, opts{venvPath: ".venv", enabled: true}, actual)
+ }
+
+ func TestGetOps_empty(t *testing.T) {
+ actual, err := getOpts(&bundle.Bundle{}, PythonMutatorPhaseLoadResources)
+
+ assert.NoError(t, err)
+ assert.Equal(t, opts{enabled: false}, actual)
+ }
+
type createOverrideVisitorTestCase struct {
name string
updatePath dyn.Path

@@ -314,48 +355,48 @@ func TestCreateOverrideVisitor(t *testing.T) {

testCases := []createOverrideVisitorTestCase{
{
- name: "load: can't change an existing job",
+ name: "load_resources: can't change an existing job",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
updatePath: dyn.MustPathFromString("resources.jobs.job0.name"),
deletePath: dyn.MustPathFromString("resources.jobs.job0.name"),
insertPath: dyn.MustPathFromString("resources.jobs.job0.name"),
- deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (delete)"),
+ deleteError: errors.New("unexpected change at \"resources.jobs.job0.name\" (delete)"),
- insertError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (insert)"),
+ insertError: errors.New("unexpected change at \"resources.jobs.job0.name\" (insert)"),
- updateError: fmt.Errorf("unexpected change at \"resources.jobs.job0.name\" (update)"),
+ updateError: errors.New("unexpected change at \"resources.jobs.job0.name\" (update)"),
},
{
- name: "load: can't delete an existing job",
+ name: "load_resources: can't delete an existing job",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
deletePath: dyn.MustPathFromString("resources.jobs.job0"),
- deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0\" (delete)"),
+ deleteError: errors.New("unexpected change at \"resources.jobs.job0\" (delete)"),
},
{
- name: "load: can insert 'resources'",
+ name: "load_resources: can insert 'resources'",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
insertPath: dyn.MustPathFromString("resources"),
insertError: nil,
},
{
- name: "load: can insert 'resources.jobs'",
+ name: "load_resources: can insert 'resources.jobs'",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
insertPath: dyn.MustPathFromString("resources.jobs"),
insertError: nil,
},
{
- name: "load: can insert a job",
+ name: "load_resources: can insert a job",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
insertPath: dyn.MustPathFromString("resources.jobs.job0"),
insertError: nil,
},
{
- name: "load: can't change include",
+ name: "load_resources: can't change include",
- phase: PythonMutatorPhaseLoad,
+ phase: PythonMutatorPhaseLoadResources,
deletePath: dyn.MustPathFromString("include[0]"),
insertPath: dyn.MustPathFromString("include[0]"),
updatePath: dyn.MustPathFromString("include[0]"),
- deleteError: fmt.Errorf("unexpected change at \"include[0]\" (delete)"),
+ deleteError: errors.New("unexpected change at \"include[0]\" (delete)"),
- insertError: fmt.Errorf("unexpected change at \"include[0]\" (insert)"),
+ insertError: errors.New("unexpected change at \"include[0]\" (insert)"),
- updateError: fmt.Errorf("unexpected change at \"include[0]\" (update)"),
+ updateError: errors.New("unexpected change at \"include[0]\" (update)"),
},
{
name: "init: can change an existing job",

@@ -371,7 +412,7 @@ func TestCreateOverrideVisitor(t *testing.T) {
name: "init: can't delete an existing job",
phase: PythonMutatorPhaseInit,
deletePath: dyn.MustPathFromString("resources.jobs.job0"),
- deleteError: fmt.Errorf("unexpected change at \"resources.jobs.job0\" (delete)"),
+ deleteError: errors.New("unexpected change at \"resources.jobs.job0\" (delete)"),
},
{
name: "init: can insert 'resources'",

@@ -397,9 +438,43 @@ func TestCreateOverrideVisitor(t *testing.T) {
deletePath: dyn.MustPathFromString("include[0]"),
insertPath: dyn.MustPathFromString("include[0]"),
updatePath: dyn.MustPathFromString("include[0]"),
- deleteError: fmt.Errorf("unexpected change at \"include[0]\" (delete)"),
+ deleteError: errors.New("unexpected change at \"include[0]\" (delete)"),
- insertError: fmt.Errorf("unexpected change at \"include[0]\" (insert)"),
+ insertError: errors.New("unexpected change at \"include[0]\" (insert)"),
- updateError: fmt.Errorf("unexpected change at \"include[0]\" (update)"),
+ updateError: errors.New("unexpected change at \"include[0]\" (update)"),
+ },
+ {
+ name: "apply_mutators: can't delete an existing job",
+ phase: PythonMutatorPhaseInit,
+ deletePath: dyn.MustPathFromString("resources.jobs.job0"),
+ deleteError: errors.New("unexpected change at \"resources.jobs.job0\" (delete)"),
+ },
+ {
+ name: "apply_mutators: can insert 'resources'",
+ phase: PythonMutatorPhaseApplyMutators,
+ insertPath: dyn.MustPathFromString("resources"),
+ insertError: nil,
+ },
+ {
+ name: "apply_mutators: can insert 'resources.jobs'",
+ phase: PythonMutatorPhaseApplyMutators,
+ insertPath: dyn.MustPathFromString("resources.jobs"),
+ insertError: nil,
+ },
+ {
+ name: "apply_mutators: can't insert a job",
+ phase: PythonMutatorPhaseApplyMutators,
+ insertPath: dyn.MustPathFromString("resources.jobs.job0"),
+ insertError: errors.New("unexpected change at \"resources.jobs.job0\" (insert)"),
+ },
+ {
+ name: "apply_mutators: can't change include",
+ phase: PythonMutatorPhaseApplyMutators,
+ deletePath: dyn.MustPathFromString("include[0]"),
+ insertPath: dyn.MustPathFromString("include[0]"),
+ updatePath: dyn.MustPathFromString("include[0]"),
+ deleteError: errors.New("unexpected change at \"include[0]\" (delete)"),
+ insertError: errors.New("unexpected change at \"include[0]\" (insert)"),
+ updateError: errors.New("unexpected change at \"include[0]\" (update)"),
},
}

@@ -458,9 +533,9 @@ type overrideVisitorOmitemptyTestCase struct {
}

func TestCreateOverrideVisitor_omitempty(t *testing.T) {
- // PyDABs can omit empty sequences/mappings in output, because we don't track them as optional,
+ // Python output can omit empty sequences/mappings in output, because we don't track them as optional,
// there is no semantic difference between empty and missing, so we keep them as they were before
- // PyDABs deleted them.
+ // Python code deleted them.

allPhases := []phase{PythonMutatorPhaseLoad, PythonMutatorPhaseInit}
location := dyn.Location{

@@ -567,18 +642,17 @@ func TestExplainProcessErr(t *testing.T) {
stderr := "/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')\n"
expected := `/home/test/.venv/bin/python3: Error while finding module specification for 'databricks.bundles.build' (ModuleNotFoundError: No module named 'databricks')

- Explanation: 'databricks-pydabs' library is not installed in the Python environment.
+ Explanation: 'databricks-bundles' library is not installed in the Python environment.

- If using Python wheels, ensure that 'databricks-pydabs' is included in the dependencies,
+ Ensure that 'databricks-bundles' is installed in Python environment:
- and that the wheel is installed in the Python environment:

- $ .venv/bin/pip install -e .
+ $ .venv/bin/pip install databricks-bundles

If using a virtual environment, ensure it is specified as the venv_path property in databricks.yml,
or activate the environment before running CLI commands:

experimental:
- pydabs:
+ python:
venv_path: .venv
`
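Editor's note: the new TestGetOps_Python and TestGetOps_PyDABs cases suggest option resolution accepts either the new experimental.python block or the legacy experimental.pydabs block and yields the same opts. A simplified standalone sketch of that fallback; the struct and field names are illustrative, not the CLI's actual config types:

package main

import "fmt"

type pythonConfig struct {
	venvPath  string
	resources []string
}

type pyDABsConfig struct {
	venvPath string
	enabled  bool
}

type opts struct {
	venvPath string
	enabled  bool
}

// getOpts prefers the new python section and falls back to the legacy pydabs
// section, mirroring the behaviour the tests above appear to pin down.
func getOpts(python pythonConfig, pydabs pyDABsConfig) opts {
	if len(python.resources) > 0 {
		return opts{venvPath: python.venvPath, enabled: true}
	}
	if pydabs.enabled {
		return opts{venvPath: pydabs.venvPath, enabled: true}
	}
	return opts{}
}

func main() {
	fmt.Println(getOpts(pythonConfig{venvPath: ".venv", resources: []string{"resources:load_resources"}}, pyDABsConfig{})) // {.venv true}
	fmt.Println(getOpts(pythonConfig{}, pyDABsConfig{venvPath: ".venv", enabled: true}))                                   // {.venv true}
	fmt.Println(getOpts(pythonConfig{}, pyDABsConfig{}))                                                                   // { false}
}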
@@ -2,9 +2,10 @@ package mutator

import (
"context"
- "fmt"
+ "errors"

"github.com/databricks/cli/bundle"
+ "github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/variable"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"

@@ -15,7 +16,7 @@ import (
type resolveVariableReferences struct {
prefixes []string
pattern dyn.Pattern
- lookupFn func(dyn.Value, dyn.Path) (dyn.Value, error)
+ lookupFn func(dyn.Value, dyn.Path, *bundle.Bundle) (dyn.Value, error)
skipFn func(dyn.Value) bool
}

@@ -44,16 +45,21 @@ func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
}
}

- func lookup(v dyn.Value, path dyn.Path) (dyn.Value, error) {
+ func lookup(v dyn.Value, path dyn.Path, b *bundle.Bundle) (dyn.Value, error) {
+ if config.IsExplicitlyEnabled(b.Config.Presets.SourceLinkedDeployment) {
+ if path.String() == "workspace.file_path" {
+ return dyn.V(b.SyncRootPath), nil
+ }
+ }
// Future opportunity: if we lookup this path in both the given root
// and the synthesized root, we know if it was explicitly set or implied to be empty.
// Then we can emit a warning if it was not explicitly set.
return dyn.GetByPath(v, path)
}

- func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
+ func lookupForComplexVariables(v dyn.Value, path dyn.Path, b *bundle.Bundle) (dyn.Value, error) {
if path[0].Key() != "variables" {
- return lookup(v, path)
+ return lookup(v, path, b)
}

varV, err := dyn.GetByPath(v, path[:len(path)-1])

@@ -68,10 +74,10 @@ func lookupForComplexVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
}

if vv.Type == variable.VariableTypeComplex {
- return dyn.InvalidValue, fmt.Errorf("complex variables cannot contain references to another complex variables")
+ return dyn.InvalidValue, errors.New("complex variables cannot contain references to another complex variables")
}

- return lookup(v, path)
+ return lookup(v, path, b)
}

func skipResolvingInNonComplexVariables(v dyn.Value) bool {

@@ -83,9 +89,9 @@ func skipResolvingInNonComplexVariables(v dyn.Value) bool {
}
}

- func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
+ func lookupForVariables(v dyn.Value, path dyn.Path, b *bundle.Bundle) (dyn.Value, error) {
if path[0].Key() != "variables" {
- return lookup(v, path)
+ return lookup(v, path, b)
}

varV, err := dyn.GetByPath(v, path[:len(path)-1])

@@ -100,10 +106,10 @@ func lookupForVariables(v dyn.Value, path dyn.Path) (dyn.Value, error) {
}

if vv.Lookup != nil && vv.Lookup.String() != "" {
- return dyn.InvalidValue, fmt.Errorf("lookup variables cannot contain references to another lookup variables")
+ return dyn.InvalidValue, errors.New("lookup variables cannot contain references to another lookup variables")
}

- return lookup(v, path)
+ return lookup(v, path, b)
}

func (*resolveVariableReferences) Name() string {

@@ -125,6 +131,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
varPath := dyn.NewPath(dyn.Key("var"))

var diags diag.Diagnostics

err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
// Synthesize a copy of the root that has all fields that are present in the type
// but not set in the dynamic value set to their corresponding empty value.

@@ -167,7 +174,7 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
if m.skipFn != nil && m.skipFn(v) {
return dyn.InvalidValue, dynvar.ErrSkipResolution
}
- return m.lookupFn(normalized, path)
+ return m.lookupFn(normalized, path, b)
}
}
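Editor's note: threading the bundle through lookup lets a single path, workspace.file_path, short-circuit to the sync root when the source-linked deployment preset is explicitly enabled. A standalone sketch of that special case, with a plain map and a bool pointer standing in for the CLI's dynamic values and presets:

package main

import "fmt"

// lookup resolves a dotted path, except that workspace.file_path maps to the
// sync root when the source-linked deployment preset is explicitly enabled.
func lookup(values map[string]string, path string, sourceLinked *bool, syncRootPath string) (string, bool) {
	if sourceLinked != nil && *sourceLinked && path == "workspace.file_path" {
		return syncRootPath, true
	}
	v, ok := values[path]
	return v, ok
}

func main() {
	values := map[string]string{"workspace.file_path": "file/path"}
	enabled := true

	v, _ := lookup(values, "workspace.file_path", &enabled, "sync/root/path")
	fmt.Println(v) // sync/root/path

	v, _ = lookup(values, "workspace.file_path", nil, "sync/root/path")
	fmt.Println(v) // file/path
}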
@@ -12,36 +12,11 @@ import (
"github.com/databricks/cli/libs/dyn"
"github.com/databricks/databricks-sdk-go/service/compute"
"github.com/databricks/databricks-sdk-go/service/jobs"
+ "github.com/databricks/databricks-sdk-go/service/pipelines"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

- func TestResolveVariableReferences(t *testing.T) {
- b := &bundle.Bundle{
- Config: config.Root{
- Bundle: config.Bundle{
- Name: "example",
- },
- Workspace: config.Workspace{
- RootPath: "${bundle.name}/bar",
- FilePath: "${workspace.root_path}/baz",
- },
- },
- }
-
- // Apply with an invalid prefix. This should not change the workspace root path.
- diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("doesntexist"))
- require.NoError(t, diags.Error())
- require.Equal(t, "${bundle.name}/bar", b.Config.Workspace.RootPath)
- require.Equal(t, "${workspace.root_path}/baz", b.Config.Workspace.FilePath)
-
- // Apply with a valid prefix. This should change the workspace root path.
- diags = bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle", "workspace"))
- require.NoError(t, diags.Error())
- require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
- require.Equal(t, "example/bar/baz", b.Config.Workspace.FilePath)
- }
-
func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{

@@ -65,37 +40,6 @@ func TestResolveVariableReferencesToBundleVariables(t *testing.T) {
require.Equal(t, "example/bar", b.Config.Workspace.RootPath)
}

- func TestResolveVariableReferencesToEmptyFields(t *testing.T) {
- b := &bundle.Bundle{
- Config: config.Root{
- Bundle: config.Bundle{
- Name: "example",
- Git: config.Git{
- Branch: "",
- },
- },
- Resources: config.Resources{
- Jobs: map[string]*resources.Job{
- "job1": {
- JobSettings: &jobs.JobSettings{
- Tags: map[string]string{
- "git_branch": "${bundle.git.branch}",
- },
- },
- },
- },
- },
- },
- }
-
- // Apply for the bundle prefix.
- diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("bundle"))
- require.NoError(t, diags.Error())
-
- // The job settings should have been interpolated to an empty string.
- require.Equal(t, "", b.Config.Resources.Jobs["job1"].JobSettings.Tags["git_branch"])
- }
-
func TestResolveVariableReferencesForPrimitiveNonStringFields(t *testing.T) {
var diags diag.Diagnostics

@@ -250,63 +194,6 @@ func TestResolveComplexVariable(t *testing.T) {
require.Equal(t, 2, b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NumWorkers)
}

- func TestResolveComplexVariableReferencesToFields(t *testing.T) {
- b := &bundle.Bundle{
- Config: config.Root{
- Bundle: config.Bundle{
- Name: "example",
- },
- Variables: map[string]*variable.Variable{
- "cluster": {
- Value: map[string]any{
- "node_type_id": "Standard_DS3_v2",
- "num_workers": 2,
- },
- Type: variable.VariableTypeComplex,
- },
- },
-
- Resources: config.Resources{
- Jobs: map[string]*resources.Job{
- "job1": {
- JobSettings: &jobs.JobSettings{
- JobClusters: []jobs.JobCluster{
- {
- NewCluster: compute.ClusterSpec{
- NodeTypeId: "random",
- },
- },
- },
- },
- },
- },
- },
- },
- }
-
- ctx := context.Background()
-
- // Assign the variables to the dynamic configuration.
- diags := bundle.ApplyFunc(ctx, b, func(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
- err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
- var p dyn.Path
- var err error
-
- p = dyn.MustPathFromString("resources.jobs.job1.job_clusters[0].new_cluster")
- v, err = dyn.SetByPath(v, p.Append(dyn.Key("node_type_id")), dyn.V("${var.cluster.node_type_id}"))
- require.NoError(t, err)
-
- return v, nil
- })
- return diag.FromErr(err)
- })
- require.NoError(t, diags.Error())
-
- diags = bundle.Apply(ctx, b, ResolveVariableReferences("bundle", "workspace", "variables"))
- require.NoError(t, diags.Error())
- require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["job1"].JobSettings.JobClusters[0].NewCluster.NodeTypeId)
- }
-
func TestResolveComplexVariableReferencesWithComplexVariablesError(t *testing.T) {
b := &bundle.Bundle{
Config: config.Root{

@@ -434,3 +321,57 @@ func TestResolveComplexVariableWithVarReference(t *testing.T) {
require.NoError(t, diags.Error())
require.Equal(t, "cicd_template==1.0.0", b.Config.Resources.Jobs["job1"].JobSettings.Tasks[0].Libraries[0].Pypi.Package)
}

+ func TestResolveVariableReferencesWithSourceLinkedDeployment(t *testing.T) {
+ testCases := []struct {
+ enabled bool
+ assert func(t *testing.T, b *bundle.Bundle)
+ }{
+ {
+ true,
+ func(t *testing.T, b *bundle.Bundle) {
+ // Variables that use workspace file path should have SyncRootValue during resolution phase
+ require.Equal(t, "sync/root/path", b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Configuration["source"])
+
+ // The file path itself should remain the same
+ require.Equal(t, "file/path", b.Config.Workspace.FilePath)
+ },
+ },
+ {
+ false,
+ func(t *testing.T, b *bundle.Bundle) {
+ require.Equal(t, "file/path", b.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Configuration["source"])
+ require.Equal(t, "file/path", b.Config.Workspace.FilePath)
+ },
+ },
+ }
+
+ for _, testCase := range testCases {
+ b := &bundle.Bundle{
+ SyncRootPath: "sync/root/path",
+ Config: config.Root{
+ Presets: config.Presets{
+ SourceLinkedDeployment: &testCase.enabled,
+ },
+ Workspace: config.Workspace{
+ FilePath: "file/path",
+ },
+ Resources: config.Resources{
+ Pipelines: map[string]*resources.Pipeline{
+ "pipeline1": {
+ PipelineSpec: &pipelines.PipelineSpec{
+ Configuration: map[string]string{
+ "source": "${workspace.file_path}",
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ diags := bundle.Apply(context.Background(), b, ResolveVariableReferences("workspace"))
+ require.NoError(t, diags.Error())
+ testCase.assert(t, b)
+ }
+ }
@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"

"github.com/databricks/cli/libs/log"

@@ -45,7 +44,7 @@ func (s *Cluster) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("compute/clusters/%s", s.ID)
+ baseURL.Path = "compute/clusters/" + s.ID
s.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"
"strconv"

@@ -52,7 +51,7 @@ func (j *Job) InitializeURL(baseURL url.URL) {
if j.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("jobs/%s", j.ID)
+ baseURL.Path = "jobs/" + j.ID
j.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"

"github.com/databricks/cli/libs/log"

@@ -47,7 +46,7 @@ func (s *MlflowExperiment) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("ml/experiments/%s", s.ID)
+ baseURL.Path = "ml/experiments/" + s.ID
s.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"

"github.com/databricks/cli/libs/log"

@@ -47,7 +46,7 @@ func (s *MlflowModel) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("ml/models/%s", s.ID)
+ baseURL.Path = "ml/models/" + s.ID
s.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"

"github.com/databricks/cli/libs/log"

@@ -55,7 +54,7 @@ func (s *ModelServingEndpoint) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("ml/endpoints/%s", s.ID)
+ baseURL.Path = "ml/endpoints/" + s.ID
s.URL = baseURL.String()
}

@@ -25,5 +25,5 @@ func (p Permission) String() string {
return fmt.Sprintf("level: %s, group_name: %s", p.Level, p.GroupName)
}

- return fmt.Sprintf("level: %s", p.Level)
+ return "level: " + p.Level
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"

"github.com/databricks/cli/libs/log"

@@ -47,7 +46,7 @@ func (p *Pipeline) InitializeURL(baseURL url.URL) {
if p.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("pipelines/%s", p.ID)
+ baseURL.Path = "pipelines/" + p.ID
p.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"
"strings"

@@ -51,7 +50,7 @@ func (s *QualityMonitor) InitializeURL(baseURL url.URL) {
if s.TableName == "" {
return
}
- baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.TableName, ".", "/"))
+ baseURL.Path = "explore/data/" + strings.ReplaceAll(s.TableName, ".", "/")
s.URL = baseURL.String()
}

@@ -2,7 +2,6 @@ package resources

import (
"context"
- "fmt"
"net/url"
"strings"

@@ -57,7 +56,7 @@ func (s *RegisteredModel) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("explore/data/models/%s", strings.ReplaceAll(s.ID, ".", "/"))
+ baseURL.Path = "explore/data/models/" + strings.ReplaceAll(s.ID, ".", "/")
s.URL = baseURL.String()
}

@@ -2,7 +2,7 @@ package resources

import (
"context"
- "fmt"
+ "errors"
"net/url"
"strings"

@@ -26,7 +26,7 @@ type Schema struct {
}

func (s *Schema) Exists(ctx context.Context, w *databricks.WorkspaceClient, id string) (bool, error) {
- return false, fmt.Errorf("schema.Exists() is not supported")
+ return false, errors.New("schema.Exists() is not supported")
}

func (s *Schema) TerraformResourceName() string {

@@ -37,7 +37,7 @@ func (s *Schema) InitializeURL(baseURL url.URL) {
if s.ID == "" {
return
}
- baseURL.Path = fmt.Sprintf("explore/data/%s", strings.ReplaceAll(s.ID, ".", "/"))
+ baseURL.Path = "explore/data/" + strings.ReplaceAll(s.ID, ".", "/")
s.URL = baseURL.String()
}
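Editor's note: the resource files above all make the same mechanical change: a single-verb fmt.Sprintf is replaced by plain string concatenation, which drops the fmt import and skips format-string parsing at runtime. A trivial sketch showing the two forms are equivalent:

package main

import "fmt"

func main() {
	id := "123"

	before := fmt.Sprintf("jobs/%s", id) // old form: single %s verb
	after := "jobs/" + id                // new form: plain concatenation

	fmt.Println(before == after) // true
}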
Some files were not shown because too many files have changed in this diff.