mirror of https://github.com/databricks/cli.git
commit acd64fa296
@@ -10,19 +10,65 @@ on:
 jobs:
   publish-to-winget-pkgs:
     runs-on:
-      group: databricks-protected-runner-group
-      labels: windows-server-latest
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco

     environment: release

     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
-        with:
-          identifier: Databricks.DatabricksCLI
-          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
-          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
-          fork-user: eng-dev-ecosystem-bot
+      - name: Checkout repository and submodules
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      # When updating the version of komac, make sure to update the checksum in the next step.
+      # Find both at https://github.com/russellbanks/Komac/releases.
+      - name: Download komac binary
+        run: |
+          curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
+
+      - name: Verify komac binary
+        run: |
+          echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
+
+      - name: Untar komac binary to temporary path
+        run: |
+          mkdir -p $RUNNER_TEMP/komac
+          tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
+
+      - name: Add komac to PATH
+        run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
+
+      - name: Confirm komac version
+        run: komac --version

       # Use the tag from the input, or the ref name if the input is not provided.
       # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
-          release-tag: ${{ inputs.tag || github.ref_name }}
+      - name: Strip "v" prefix from version
+        id: strip_version
+        run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
+
+      - name: Get URLs of signed Windows binaries
+        id: get_windows_urls
+        run: |
+          urls=$(
+            gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
+            jq -r .assets[].browser_download_url | \
+            grep -E '_windows_.*-signed\.zip$' | \
+            tr '\n' ' '
+          )
+          if [ -z "$urls" ]; then
+            echo "No signed Windows binaries found" >&2
+            exit 1
+          fi
+          echo "urls=$urls" >> "$GITHUB_OUTPUT"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to Winget
+        run: |
+          komac update Databricks.DatabricksCLI \
+            --version ${{ steps.strip_version.outputs.version }} \
+            --submit \
+            --urls ${{ steps.get_windows_urls.outputs.urls }} \
+        env:
+          KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
+          GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
@@ -60,12 +60,6 @@ jobs:
       - name: Install uv
         uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0

-      - name: Run ruff
-        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
-        with:
-          version: "0.9.1"
-          args: "format --check"
-
       - name: Set go env
         run: |
           echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV

@@ -80,7 +74,7 @@ jobs:
       - name: Run tests with coverage
         run: make cover

-  golangci:
+  linters:
     needs: cleanups
     name: lint
     runs-on: ubuntu-latest

@@ -105,6 +99,11 @@ jobs:
         with:
           version: v1.63.4
           args: --timeout=15m
+      - name: Run ruff
+        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
+        with:
+          version: "0.9.1"
+          args: "format --check"

   validate-bundle-schema:
     needs: cleanups
CHANGELOG.md (20 changes)

@@ -1,5 +1,25 @@
 # Version changelog

+## [Release] Release v0.239.1
+
+CLI:
+ * Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
+ * Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
+
+Bundles:
+ * Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
+ * Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
+ * Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
+ * fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
+ * Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
+ * Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
+ * Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
+ * Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
+ * Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
+ * Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
+
+
 ## [Release] Release v0.239.0

 ### New feature announcement
Makefile (5 changes)

@@ -48,6 +48,9 @@ vendor:
 schema:
 	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json

+docs:
+	go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
+
 INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h

 integration:

@@ -56,4 +59,4 @@ integration:
 integration-short:
 	$(INTEGRATION) -short

-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
+.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs
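A hypothetical local invocation of the new target (assumes a Go toolchain; mirrors how the existing `schema` target is used):

```bash
# Regenerate the bundle documentation via the new docsgen entry point:
make docs
```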
NOTICE (4 changes)

@@ -105,3 +105,7 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
 https://github.com/hexops/gotextdiff
 Copyright (c) 2009 The Go Authors. All rights reserved.
 License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
+
+https://github.com/BurntSushi/toml
+Copyright (c) 2013 TOML authors
+https://github.com/BurntSushi/toml/blob/master/COPYING
@@ -17,3 +17,5 @@ For more complex tests one can also use:
 - `errcode` helper: if the command fails with a non-zero code, it appends `Exit code: N` to the output but returns success to the caller (bash), allowing the script to continue.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to a custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
+
+See [selftest](./selftest) for a toy test.
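A minimal sketch of how these helpers compose inside a test `script` (the specific command and `jq` filter are illustrative, borrowed from the tests below, not prescribed by this README):

```bash
# Print the command, run it, and append "Exit code: N" on failure
# instead of aborting the script:
errcode trace $CLI bundle validate -o json | jq .bundle.git

# Capture output in custom files; anything starting with "out" is compared:
$CLI bundle validate > out.txt 2> out.error.txt
```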
@@ -15,10 +15,12 @@ import (
 	"strings"
 	"testing"
 	"time"
+	"unicode/utf8"

 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/cli/libs/testdiff"
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/databricks/databricks-sdk-go"
 	"github.com/stretchr/testify/require"
 )

@@ -44,6 +46,7 @@ const (
 	EntryPointScript = "script"
 	CleanupScript    = "script.cleanup"
 	PrepareScript    = "script.prepare"
+	MaxFileSize      = 100_000
 )

 var Scripts = map[string]bool{

@@ -60,12 +63,7 @@ func TestInprocessMode(t *testing.T) {
 	if InprocessMode {
 		t.Skip("Already tested by TestAccept")
 	}
-	if runtime.GOOS == "windows" {
-		// - catalogs  A catalog is the first layer of Unity Catalog’s three-level namespace.
-		// + catalogs  A catalog is the first layer of Unity Catalog�s three-level namespace.
-		t.Skip("Fails on CI on unicode characters")
-	}
-	require.NotZero(t, testAccept(t, true, "help"))
+	require.Equal(t, 1, testAccept(t, true, "selftest"))
 }

 func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {

@@ -93,23 +91,24 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	}

 	t.Setenv("CLI", execPath)
-	repls.Set(execPath, "$CLI")
+	repls.SetPath(execPath, "$CLI")

 	// Make helper scripts available
 	t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))

 	tempHomeDir := t.TempDir()
-	repls.Set(tempHomeDir, "$TMPHOME")
+	repls.SetPath(tempHomeDir, "$TMPHOME")
 	t.Logf("$TMPHOME=%v", tempHomeDir)

-	// Prevent CLI from downloading terraform in each test:
-	t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
+	// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
+	uvCache := getUVDefaultCacheDir(t)
+	t.Setenv("UV_CACHE_DIR", uvCache)

 	ctx := context.Background()
 	cloudEnv := os.Getenv("CLOUD_ENV")

 	if cloudEnv == "" {
-		server := testutil.StartServer(t)
+		server := testserver.New(t)
 		AddHandlers(server)
 		// Redirect API access to local server:
 		t.Setenv("DATABRICKS_HOST", server.URL)

@@ -118,6 +117,9 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 		homeDir := t.TempDir()
 		// Do not read user's ~/.databrickscfg
 		t.Setenv(env.HomeEnvVar(), homeDir)
+
+		// Prevent CLI from downloading terraform in each test:
+		t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
 	}

 	workspaceClient, err := databricks.NewWorkspaceClient()

@@ -129,6 +131,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	testdiff.PrepareReplacementsUser(t, &repls, *user)
 	testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
 	testdiff.PrepareReplacementsUUID(t, &repls)
+	testdiff.PrepareReplacementsDevVersion(t, &repls)

 	testDirs := getTests(t)
 	require.NotEmpty(t, testDirs)

@@ -175,6 +178,13 @@ func getTests(t *testing.T) []string {
 }

 func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
+	config, configPath := LoadConfig(t, dir)
+
+	isEnabled, isPresent := config.GOOS[runtime.GOOS]
+	if isPresent && !isEnabled {
+		t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
+	}
+
 	var tmpDir string
 	var err error
 	if KeepTmp {

@@ -187,12 +197,8 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		tmpDir = t.TempDir()
 	}

-	// Converts C:\Users\DENIS~1.BIL -> C:\Users\denis.bilenko
-	tmpDirEvalled, err1 := filepath.EvalSymlinks(tmpDir)
-	if err1 == nil && tmpDirEvalled != tmpDir {
-		repls.SetPathWithParents(tmpDirEvalled, "$TMPDIR")
-	}
 	repls.SetPathWithParents(tmpDir, "$TMPDIR")
+	repls.Repls = append(repls.Repls, config.Repls...)

 	scriptContents := readMergedScriptContents(t, dir)
 	testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)

@@ -226,9 +232,11 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 	formatOutput(out, err)
 	require.NoError(t, out.Close())

+	printedRepls := false
+
 	// Compare expected outputs
 	for relPath := range outputs {
-		doComparison(t, repls, dir, tmpDir, relPath)
+		doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 	}

 	// Make sure there are not unaccounted for new files

@@ -240,26 +248,27 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
 		if _, ok := outputs[relPath]; ok {
 			continue
 		}
+		t.Errorf("Unexpected output: %s", relPath)
 		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
 			// Show the contents & support overwrite mode for it:
-			doComparison(t, repls, dir, tmpDir, relPath)
+			doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 		}
 	}
 }

-func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string) {
+func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
 	pathRef := filepath.Join(dirRef, relPath)
 	pathNew := filepath.Join(dirNew, relPath)
-	bufRef, okRef := readIfExists(t, pathRef)
-	bufNew, okNew := readIfExists(t, pathNew)
+	bufRef, okRef := tryReading(t, pathRef)
+	bufNew, okNew := tryReading(t, pathNew)
 	if !okRef && !okNew {
-		t.Errorf("Both files are missing: %s, %s", pathRef, pathNew)
+		t.Errorf("Both files are missing or have errors: %s, %s", pathRef, pathNew)
 		return
 	}

-	valueRef := testdiff.NormalizeNewlines(string(bufRef))
-	valueNew := testdiff.NormalizeNewlines(string(bufNew))
+	valueRef := testdiff.NormalizeNewlines(bufRef)
+	valueNew := testdiff.NormalizeNewlines(bufNew)

 	// Apply replacements to the new value only.
 	// The reference value is stored after applying replacements.

@@ -293,6 +302,15 @@ func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirN
 		t.Logf("Overwriting existing output file: %s", relPath)
 		testutil.WriteFile(t, pathRef, valueNew)
 	}
+
+	if !equal && printedRepls != nil && !*printedRepls {
+		*printedRepls = true
+		var items []string
+		for _, item := range repls.Repls {
+			items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
+		}
+		t.Log("Available replacements:\n" + strings.Join(items, "\n"))
+	}
 }

 // Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...

@@ -308,14 +326,14 @@ func readMergedScriptContents(t *testing.T, dir string) string {
 	cleanups := []string{}

 	for {
-		x, ok := readIfExists(t, filepath.Join(dir, CleanupScript))
+		x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
 		if ok {
-			cleanups = append(cleanups, string(x))
+			cleanups = append(cleanups, x)
 		}

-		x, ok = readIfExists(t, filepath.Join(dir, PrepareScript))
+		x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
 		if ok {
-			prepares = append(prepares, string(x))
+			prepares = append(prepares, x)
 		}

 		if dir == "" || dir == "." {

@@ -402,16 +420,33 @@ func formatOutput(w io.Writer, err error) {
 	}
 }

-func readIfExists(t *testing.T, path string) ([]byte, bool) {
-	data, err := os.ReadFile(path)
-	if err == nil {
-		return data, true
-	}
-
-	if !errors.Is(err, os.ErrNotExist) {
-		t.Fatalf("%s: %s", path, err)
-	}
-	return []byte{}, false
+func tryReading(t *testing.T, path string) (string, bool) {
+	info, err := os.Stat(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			t.Errorf("%s: %s", path, err)
+		}
+		return "", false
+	}
+
+	if info.Size() > MaxFileSize {
+		t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+		return "", false
+	}
+
+	data, err := os.ReadFile(path)
+	if err != nil {
+		// already checked ErrNotExist above
+		t.Errorf("%s: %s", path, err)
+		return "", false
+	}
+
+	if !utf8.Valid(data) {
+		t.Errorf("%s: not valid utf-8", path)
+		return "", false
+	}
+
+	return string(data), true
 }

 func CopyDir(src, dst string, inputs, outputs map[string]bool) error {

@@ -477,3 +512,16 @@ func ListDir(t *testing.T, src string) []string {
 	}
 	return files
 }
+
+func getUVDefaultCacheDir(t *testing.T) string {
+	// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
+	// the default cache directory is
+	// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
+	cacheDir, err := os.UserCacheDir()
+	require.NoError(t, err)
+	if runtime.GOOS == "windows" {
+		return cacheDir + "\\uv\\cache"
+	} else {
+		return cacheDir + "/uv"
+	}
+}
@@ -0,0 +1,2 @@
+bundle:
+  name: git-permerror
@@ -0,0 +1,72 @@
+=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git
+
+>>> $CLI bundle validate
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+Name: git-permerror
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/git-permerror/default
+
+Found 1 error
+
+Exit code: 1
+
+>>> $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": "."
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": "."
+}
+
+
+=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git/HEAD
+
+>>> $CLI bundle validate -o json
+{
+  "bundle_root_path": "."
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+{
+  "bundle_root_path": "."
+}
+
+
+=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
+
+>>> chmod 000 .git/config
+
+>>> $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": "."
+}
+
+>>> withdir subdir/a/b $CLI bundle validate -o json
+Error: unable to load repository specific gitconfig: open config: permission denied
+
+
+Exit code: 1
+{
+  "bundle_root_path": "."
+}
@@ -0,0 +1,26 @@
+mkdir myrepo
+cd myrepo
+cp ../databricks.yml .
+git-repo-init
+mkdir -p subdir/a/b
+
+printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
+trace chmod 000 .git
+errcode trace $CLI bundle validate
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
+chmod 700 .git
+trace chmod 000 .git/HEAD
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
+chmod 666 .git/HEAD
+trace chmod 000 .git/config
+errcode trace $CLI bundle validate -o json | jq .bundle.git
+errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
+
+cd ..
+rm -fr myrepo
@@ -0,0 +1,5 @@
+Badness = "Warning logs not shown; inferred flag is incorrectly set to true; bundle_root_path is not correct"
+
+[GOOS]
+# This test relies on chmod which does not work on Windows
+windows = false
@@ -0,0 +1,6 @@
+bundle:
+  name: non_yaml_in_includes
+
+include:
+  - test.py
+  - resources/*.yml
@@ -0,0 +1,10 @@
+Error: Files in the 'include' configuration section must be YAML files.
+  in databricks.yml:5:4
+
+The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
+
+Name: non_yaml_in_includes
+
+Found 1 error
+
+Exit code: 1
@@ -0,0 +1 @@
+$CLI bundle validate
@@ -0,0 +1 @@
+print("Hello world")
@@ -1,8 +1,6 @@

 >>> $CLI bundle validate -t development -o json
-
-Exit code: 0

 >>> $CLI bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
@@ -1,8 +1,6 @@

 >>> $CLI bundle validate -t development -o json
-
-Exit code: 0

 >>> $CLI bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]
@@ -0,0 +1,11 @@
+bundle:
+  name: scripts
+
+experimental:
+  scripts:
+    preinit: "python3 ./myscript.py $EXITCODE preinit"
+    postinit: "python3 ./myscript.py 0 postinit"
+    prebuild: "python3 ./myscript.py 0 prebuild"
+    postbuild: "python3 ./myscript.py 0 postbuild"
+    predeploy: "python3 ./myscript.py 0 predeploy"
+    postdeploy: "python3 ./myscript.py 0 postdeploy"
@@ -0,0 +1,8 @@
+import sys
+
+info = " ".join(sys.argv[1:])
+sys.stderr.write(f"from myscript.py {info}: hello stderr!\n")
+sys.stdout.write(f"from myscript.py {info}: hello stdout!\n")
+
+exitcode = int(sys.argv[1])
+sys.exit(exitcode)
@@ -0,0 +1,52 @@
+
+>>> EXITCODE=0 errcode $CLI bundle validate
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Name: scripts
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/scripts/default
+
+Validation OK!
+
+>>> EXITCODE=1 errcode $CLI bundle validate
+Executing 'preinit' script
+from myscript.py 1 preinit: hello stdout!
+from myscript.py 1 preinit: hello stderr!
+Error: failed to execute script: exit status 1
+
+Name: scripts
+
+Found 1 error
+
+Exit code: 1
+
+>>> EXITCODE=0 errcode $CLI bundle deploy
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Executing 'prebuild' script
+from myscript.py 0 prebuild: hello stdout!
+from myscript.py 0 prebuild: hello stderr!
+Executing 'postbuild' script
+from myscript.py 0 postbuild: hello stdout!
+from myscript.py 0 postbuild: hello stderr!
+Executing 'predeploy' script
+from myscript.py 0 predeploy: hello stdout!
+from myscript.py 0 predeploy: hello stderr!
+Error: unable to deploy to /Workspace/Users/$USERNAME/.bundle/scripts/default/state as $USERNAME.
+Please make sure the current user or one of their groups is listed under the permissions of this bundle.
+For assistance, contact the owners of this project.
+They may need to redeploy the bundle to apply the new permissions.
+Please refer to https://docs.databricks.com/dev-tools/bundles/permissions.html for more on managing permissions.
+
+
+Exit code: 1
@@ -0,0 +1,3 @@
+trace EXITCODE=0 errcode $CLI bundle validate
+trace EXITCODE=1 errcode $CLI bundle validate
+trace EXITCODE=0 errcode $CLI bundle deploy
@@ -3,4 +3,6 @@ mkdir myrepo
 cd myrepo
 cp ../databricks.yml .
 git-repo-init
-$CLI bundle validate | sed 's/\\\\/\//g'
+errcode $CLI bundle validate
+cd ..
+rm -fr myrepo
@@ -0,0 +1,3 @@
+[[Repls]]
+Old = '\\\\myrepo'
+New = '/myrepo'
@@ -3,7 +3,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_dbt_sql
-  uuid: <UUID>
+  uuid: [UUID]

 include:
   - resources/*.yml
@@ -2,7 +2,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_default_python
-  uuid: <UUID>
+  uuid: [UUID]

 include:
   - resources/*.yml
@@ -20,7 +20,7 @@
      "rowLimit": 10000
     },
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }
@@ -6,7 +6,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }

@@ -24,7 +24,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }

@@ -47,7 +47,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }
@@ -6,7 +6,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }

@@ -37,7 +37,7 @@
      "rowLimit": 10000
     },
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }
@@ -2,7 +2,7 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_default_sql
-  uuid: <UUID>
+  uuid: [UUID]

 include:
   - resources/*.yml
@@ -7,7 +7,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }
@@ -10,6 +10,8 @@ Please refer to the README.md file for "getting started" instructions.
 See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.

 >>> $CLI bundle validate -t dev --output json
+Warning: Ignoring Databricks CLI version constraint for development build. Required: >= 0.238.0, current: $DEV_VERSION
+
 {
   "jobs": {
     "my_jobs_as_code_job": {
@@ -2,7 +2,8 @@
 # See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
 bundle:
   name: my_jobs_as_code
-  uuid: <UUID>
+  uuid: [UUID]
+  databricks_cli_version: ">= 0.238.0"

 experimental:
   python:
@@ -6,7 +6,7 @@
    "application/vnd.databricks.v1+cell": {
     "cellMetadata": {},
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }

@@ -37,7 +37,7 @@
      "rowLimit": 10000
     },
     "inputWidgets": {},
-    "nuid": "<UUID>",
+    "nuid": "[UUID]",
     "showTitle": false,
     "title": ""
    }
@@ -3,11 +3,7 @@ trace $CLI bundle init experimental-jobs-as-code --config-file ./input.json --ou
 cd output/my_jobs_as_code

 # silence uv output because it's non-deterministic
-uv sync 2> /dev/null
-
-# remove version constraint because it always creates a warning on dev builds
-cat databricks.yml | grep -v databricks_cli_version > databricks.yml.new
-mv databricks.yml.new databricks.yml
+uv sync -q

 trace $CLI bundle validate -t dev --output json | jq ".resources"
@@ -0,0 +1,3 @@
+Error: not a bundle template: expected to find a template schema file at databricks_template_schema.json
+
+Exit code: 1
@@ -0,0 +1,2 @@
+export NO_COLOR=1
+$CLI bundle init /DOES/NOT/EXIST
@@ -0,0 +1 @@
+Badness = 'The error message should include full path: "expected to find a template schema file at databricks_template_schema.json"'
@@ -0,0 +1,5 @@
+Error: git clone failed: git clone https://invalid-domain-123.databricks.com/hello/world $TMPDIR_GPARENT/world-123456 --no-tags --depth=1: exit status 128. Cloning into '$TMPDIR_GPARENT/world-123456'...
+fatal: unable to access 'https://invalid-domain-123.databricks.com/hello/world/': Could not resolve host: invalid-domain-123.databricks.com
+
+
+Exit code: 1
@@ -0,0 +1,2 @@
+export NO_COLOR=1
+$CLI bundle init https://invalid-domain-123.databricks.com/hello/world
@@ -0,0 +1,7 @@
+[[Repls]]
+Old = '\\'
+New = '/'
+
+[[Repls]]
+Old = '/world-[0-9]+'
+New = '/world-123456'
@@ -1,7 +1,5 @@

 >>> errcode $CLI bundle validate --var a=one -o json
-
-Exit code: 0
 {
   "a": {
     "default": "hello",
@@ -0,0 +1,14 @@
+bundle:
+  name: double_underscore
+
+variables:
+  double__underscore:
+    description: "This is a variable with a double underscore"
+    default: "default"
+
+resources:
+  jobs:
+    test_job:
+      name: "test"
+      tasks:
+        - task_key: "test ${var.double__underscore}"
@@ -0,0 +1,7 @@
+
+>>> $CLI bundle validate -o json
+[
+  {
+    "task_key": "test default"
+  }
+]
@@ -0,0 +1 @@
+trace $CLI bundle validate -o json | jq .resources.jobs.test_job.tasks
@@ -1,4 +1,4 @@
-Error: no value assigned to required variable a. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_a environment variable
+Error: no value assigned to required variable a. Assignment can be done using "--var", by setting the BUNDLE_VAR_a environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

 Name: empty${var.a}
 Target: default
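A minimal sketch of the file-based assignment the new message refers to, assuming a target named `default` and the required variable `a` from this test (the `jq` filter is illustrative):

```bash
# .databricks/bundle/<target>/variable-overrides.json must be a JSON object
# of the form {"var1": "value1", "var2": "value2"}:
mkdir -p .databricks/bundle/default
echo '{"a": "value-from-file"}' > .databricks/bundle/default/variable-overrides.json
$CLI bundle validate -o json | jq .variables.a.value
```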
@@ -18,12 +18,13 @@ variables:
     description: variable with lookup
     lookup:
       cluster_policy: wrong-cluster-policy
+  result:
+    default: ${var.a} ${var.b}

 bundle:
   name: test bundle

-workspace:
-  profile: ${var.a} ${var.b}
-
 targets:
   env-with-single-variable-override:
     variables:
@@ -9,7 +9,7 @@
 "prod-a env-var-b"

 >>> errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
-Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
+Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

 Name: test bundle
 Target: env-missing-a-required-variable-assignment

@@ -36,5 +36,6 @@ Exit code: 1
   "b": "prod-b",
   "d": "4321",
   "e": "1234",
-  "f": "9876"
+  "f": "9876",
+  "result": "default-a prod-b"
 }
@@ -1,6 +1,6 @@
-trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .workspace.profile
-trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
-trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .workspace.profile
+trace $CLI bundle validate -t env-with-single-variable-override -o json | jq .variables.result.value
+trace $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value
+trace BUNDLE_VAR_b=env-var-b $CLI bundle validate -t env-with-two-variable-overrides -o json | jq .variables.result.value
 trace errcode $CLI bundle validate -t env-missing-a-required-variable-assignment
 trace errcode $CLI bundle validate -t env-using-an-undefined-variable
 trace $CLI bundle validate -t env-overrides-lookup -o json | jq '.variables | map_values(.value)'
@@ -0,0 +1,5 @@
+{
+  "cluster_key": {
+    "node_type_id": "Standard_DS3_v2"
+  }
+}
@@ -0,0 +1,7 @@
+{
+  "cluster": {
+    "node_type_id": "Standard_DS3_v2"
+  },
+  "cluster_key": "mlops_stacks-cluster",
+  "cluster_workers": 2
+}
@@ -0,0 +1 @@
+foo
@@ -0,0 +1,3 @@
+{
+  "cluster": "mlops_stacks-cluster"
+}
@@ -0,0 +1,3 @@
+{
+  "cluster_key": "mlops_stacks-cluster-from-file"
+}
@@ -0,0 +1,4 @@
+{
+  "cluster_key": "mlops_stacks-cluster",
+  "cluster_workers": 2
+}
@@ -0,0 +1,3 @@
+[
+  "foo"
+]
@@ -0,0 +1 @@
+!.databricks
@@ -0,0 +1,53 @@
+bundle:
+  name: TestResolveVariablesFromFile
+
+variables:
+  cluster:
+    type: "complex"
+  cluster_key:
+  cluster_workers:
+
+resources:
+  jobs:
+    job1:
+      job_clusters:
+        - job_cluster_key: ${var.cluster_key}
+          new_cluster:
+            node_type_id: "${var.cluster.node_type_id}"
+            num_workers: ${var.cluster_workers}
+
+targets:
+  default:
+    default: true
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  without_defaults:
+
+  complex_to_string:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  string_to_complex:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: "default"
+
+  wrong_file_structure:
+
+  invalid_json:
+
+  with_value:
+    variables:
+      cluster_workers: 1
+      cluster:
+        node_type_id: "default"
+      cluster_key: cluster_key_value
@@ -0,0 +1,113 @@
+
+=== variable file
+>>> $CLI bundle validate -o json
+{
+  "job_cluster_key": "mlops_stacks-cluster",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable file and variable flag
+>>> $CLI bundle validate -o json --var=cluster_key=mlops_stacks-cluster-overriden
+{
+  "job_cluster_key": "mlops_stacks-cluster-overriden",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable file and environment variable
+>>> BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json
+{
+  "job_cluster_key": "mlops_stacks-cluster-overriden",
+  "new_cluster": {
+    "node_type_id": "Standard_DS3_v2",
+    "num_workers": 2
+  }
+}
+
+=== variable has value in config file
+>>> $CLI bundle validate -o json --target with_value
+{
+  "job_cluster_key": "mlops_stacks-cluster-from-file",
+  "new_cluster": {
+    "node_type_id": "default",
+    "num_workers": 1
+  }
+}
+
+=== file cannot be parsed
+>>> errcode $CLI bundle validate -o json --target invalid_json
+Error: failed to parse variables file $TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json: error decoding JSON at :0:0: invalid character 'o' in literal false (expecting 'a')
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== file has wrong structure
+>>> errcode $CLI bundle validate -o json --target wrong_file_structure
+Error: failed to parse variables file $TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json: invalid format
+
+Variables file must be a JSON object with the following format:
+{"var1": "value1", "var2": "value2"}
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== file has variable that is complex but default is string
+>>> errcode $CLI bundle validate -o json --target complex_to_string
+Error: variable cluster_key is not of type complex, but the value in the variable file is a complex type
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== file has variable that is string but default is complex
+>>> errcode $CLI bundle validate -o json --target string_to_complex
+Error: variable cluster is of type complex, but the value in the variable file is not a complex type
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
+
+=== variable is required but it's not provided in the file
+>>> errcode $CLI bundle validate -o json --target without_defaults
+Error: no value assigned to required variable cluster. Assignment can be done using "--var", by setting the BUNDLE_VAR_cluster environment variable, or in .databricks/bundle/<target>/variable-overrides.json file
+
+
+Exit code: 1
+{
+  "job_cluster_key": "${var.cluster_key}",
+  "new_cluster": {
+    "node_type_id": "${var.cluster.node_type_id}",
+    "num_workers": "${var.cluster_workers}"
+  }
+}
@@ -0,0 +1,30 @@
+cluster_expr=".resources.jobs.job1.job_clusters[0]"
+
+# defaults from variable file, see .databricks/bundle/<target>/variable-overrides.json
+
+title "variable file"
+trace $CLI bundle validate -o json | jq $cluster_expr
+
+title "variable file and variable flag"
+trace $CLI bundle validate -o json --var="cluster_key=mlops_stacks-cluster-overriden" | jq $cluster_expr
+
+title "variable file and environment variable"
+trace BUNDLE_VAR_cluster_key=mlops_stacks-cluster-overriden $CLI bundle validate -o json | jq $cluster_expr
+
+title "variable has value in config file"
+trace $CLI bundle validate -o json --target with_value | jq $cluster_expr
+
+title "file cannot be parsed"
+trace errcode $CLI bundle validate -o json --target invalid_json | jq $cluster_expr
+
+title "file has wrong structure"
+trace errcode $CLI bundle validate -o json --target wrong_file_structure | jq $cluster_expr
+
+title "file has variable that is complex but default is string"
+trace errcode $CLI bundle validate -o json --target complex_to_string | jq $cluster_expr
+
+title "file has variable that is string but default is complex"
+trace errcode $CLI bundle validate -o json --target string_to_complex | jq $cluster_expr
+
+title "variable is required but it's not provided in the file"
+trace errcode $CLI bundle validate -o json --target without_defaults | jq $cluster_expr
@@ -0,0 +1,8 @@
+# Fix for windows
+[[Repls]]
+Old = '\$TMPDIR\\.databricks\\bundle\\wrong_file_structure\\variable-overrides.json'
+New = '$$TMPDIR/.databricks/bundle/wrong_file_structure/variable-overrides.json'
+
+[[Repls]]
+Old = '\$TMPDIR\\.databricks\\bundle\\invalid_json\\variable-overrides.json'
+New = '$$TMPDIR/.databricks/bundle/invalid_json/variable-overrides.json'
@@ -3,8 +3,7 @@
   "bundle": {
     "environment": "dev",
     "git": {
-      "bundle_root_path": ".",
-      "inferred": true
+      "bundle_root_path": "."
     },
     "target": "dev",
     "terraform": {
@@ -3,7 +3,7 @@
 "abc def"

 >>> errcode $CLI bundle validate
-Error: no value assigned to required variable b. Assignment can be done through the "--var" flag or by setting the BUNDLE_VAR_b environment variable
+Error: no value assigned to required variable b. Assignment can be done using "--var", by setting the BUNDLE_VAR_b environment variable, or in .databricks/bundle/<target>/variable-overrides.json file

 Name: ${var.a} ${var.b}
 Target: default
@@ -8,12 +8,12 @@ import (
 	"testing"

 	"github.com/databricks/cli/internal/testcli"
-	"github.com/databricks/cli/internal/testutil"
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/stretchr/testify/require"
 )

-func StartCmdServer(t *testing.T) *testutil.Server {
-	server := testutil.StartServer(t)
+func StartCmdServer(t *testing.T) *testserver.Server {
+	server := StartServer(t)
 	server.Handle("/", func(r *http.Request) (any, error) {
 		q := r.URL.Query()
 		args := strings.Split(q.Get("args"), " ")
@@ -0,0 +1,104 @@
+package acceptance_test
+
+import (
+    "os"
+    "path/filepath"
+    "sync"
+    "testing"
+
+    "github.com/BurntSushi/toml"
+    "github.com/databricks/cli/libs/testdiff"
+    "github.com/stretchr/testify/require"
+)
+
+const configFilename = "test.toml"
+
+var (
+    configCache map[string]TestConfig
+    configMutex sync.Mutex
+)
+
+type TestConfig struct {
+    // Place to describe what's wrong with this test. Does not affect how the test is run.
+    Badness string
+
+    // Which OSes the test is enabled on. Each string is compared against runtime.GOOS.
+    // If absent, default to true.
+    GOOS map[string]bool
+
+    // List of additional replacements to apply on this test.
+    // Old is a regexp, New is a replacement expression.
+    Repls []testdiff.Replacement
+}
+
+// FindConfig finds the closest config file.
+func FindConfig(t *testing.T, dir string) (string, bool) {
+    shared := false
+    for {
+        path := filepath.Join(dir, configFilename)
+        _, err := os.Stat(path)
+
+        if err == nil {
+            return path, shared
+        }
+
+        shared = true
+
+        if dir == "" || dir == "." {
+            break
+        }
+
+        if os.IsNotExist(err) {
+            dir = filepath.Dir(dir)
+            continue
+        }
+
+        t.Fatalf("Error while reading %s: %s", path, err)
+    }
+
+    t.Fatal("Config not found: " + configFilename)
+    return "", shared
+}
+
+// LoadConfig loads the config file. Non-leaf configs are cached.
+func LoadConfig(t *testing.T, dir string) (TestConfig, string) {
+    path, leafConfig := FindConfig(t, dir)
+
+    if leafConfig {
+        return DoLoadConfig(t, path), path
+    }
+
+    configMutex.Lock()
+    defer configMutex.Unlock()
+
+    if configCache == nil {
+        configCache = make(map[string]TestConfig)
+    }
+
+    result, ok := configCache[path]
+    if ok {
+        return result, path
+    }
+
+    result = DoLoadConfig(t, path)
+    configCache[path] = result
+    return result, path
+}
+
+func DoLoadConfig(t *testing.T, path string) TestConfig {
+    bytes, err := os.ReadFile(path)
+    if err != nil {
+        t.Fatalf("failed to read config: %s", err)
+    }
+
+    var config TestConfig
+    meta, err := toml.Decode(string(bytes), &config)
+    require.NoError(t, err)
+
+    keys := meta.Undecoded()
+    if len(keys) > 0 {
+        t.Fatalf("Undecoded keys in %s: %#v", path, keys)
+    }
+
+    return config
+}
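For orientation, a minimal sketch of how an acceptance test might consume this config. The helper below is hypothetical (not part of the diff); it relies on the `TestConfig`/`LoadConfig` definitions added above and mirrors the "absent defaults to true" rule from the `GOOS` comment via a two-value map lookup:

```go
package acceptance_test

import (
    "runtime"
    "testing"
)

// skipIfDisabled is a hypothetical helper: it loads the closest test.toml
// and skips the test when the current OS is explicitly disabled.
// A missing GOOS key leaves ok == false, so the test stays enabled by default.
func skipIfDisabled(t *testing.T, dir string) TestConfig {
    config, configPath := LoadConfig(t, dir)
    if enabled, ok := config.GOOS[runtime.GOOS]; ok && !enabled {
        t.Skipf("disabled via GOOS.%s in %s", runtime.GOOS, configPath)
    }
    return config
}
```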
@@ -6,7 +6,9 @@ errcode() {
     local exit_code=$?
     # Re-enable 'set -e' if it was previously set
    set -e
+    if [ $exit_code -ne 0 ]; then
     >&2 printf "\nExit code: $exit_code\n"
+    fi
 }
 
 trace() {
@@ -37,6 +39,23 @@ git-repo-init() {
     git config core.autocrlf false
     git config user.name "Tester"
     git config user.email "tester@databricks.com"
+    git config core.hooksPath no-hooks
     git add databricks.yml
     git commit -qm 'Add databricks.yml'
 }
+
+title() {
+    local label="$1"
+    printf "\n=== %s" "$label"
+}
+
+withdir() {
+    local dir="$1"
+    shift
+    local orig_dir="$(pwd)"
+    cd "$dir" || return $?
+    "$@"
+    local exit_code=$?
+    cd "$orig_dir" || return $?
+    return $exit_code
+}
@@ -0,0 +1 @@
+HELLO
@@ -0,0 +1,39 @@
+=== Capturing STDERR
+>>> python3 -c import sys; sys.stderr.write("STDERR\n")
+STDERR
+
+=== Capturing STDOUT
+>>> python3 -c import sys; sys.stderr.write("STDOUT\n")
+STDOUT
+
+=== Capturing exit code
+>>> errcode python3 -c raise SystemExit(5)
+
+Exit code: 5
+
+=== Capturing exit code (alt)
+>>> python3 -c raise SystemExit(7)
+
+Exit code: 7
+
+=== Capturing pwd
+>>> python3 -c import os; print(os.getcwd())
+$TMPDIR
+
+=== Capturing subdir
+>>> mkdir -p subdir/a/b/c
+
+>>> withdir subdir/a/b/c python3 -c import os; print(os.getcwd())
+$TMPDIR/subdir/a/b/c
+
+=== Custom output files - everything starting with out is captured and compared
+>>> echo HELLO
+
+=== Custom regex can be specified in [[Repl]] section
+1234
+CUSTOM_NUMBER_REGEX
+123456
+
+=== Testing --version
+>>> $CLI --version
+Databricks CLI v$DEV_VERSION
@@ -0,0 +1,29 @@
+printf "=== Capturing STDERR"
+trace python3 -c 'import sys; sys.stderr.write("STDERR\n")'
+
+printf "\n=== Capturing STDOUT"
+trace python3 -c 'import sys; sys.stderr.write("STDOUT\n")'
+
+printf "\n=== Capturing exit code"
+trace errcode python3 -c 'raise SystemExit(5)'
+
+printf "\n=== Capturing exit code (alt)"
+errcode trace python3 -c 'raise SystemExit(7)'
+
+printf "\n=== Capturing pwd"
+trace python3 -c 'import os; print(os.getcwd())'
+
+printf "\n=== Capturing subdir"
+trace mkdir -p subdir/a/b/c
+trace withdir subdir/a/b/c python3 -c 'import os; print(os.getcwd())'
+
+printf "\n=== Custom output files - everything starting with out is captured and compared"
+trace echo HELLO > out.hello.txt
+
+printf "\n=== Custom regex can be specified in [[Repl]] section\n"
+echo 1234
+echo 12345
+echo 123456
+
+printf "\n=== Testing --version"
+trace $CLI --version
@@ -0,0 +1,20 @@
+# Badness = "Brief description of what's wrong with the test output, if anything"
+
+#[GOOS]
+# Disable on Windows
+#windows = false
+
+# Disable on Mac
+#mac = false
+
+# Disable on Linux
+#linux = false
+
+[[Repls]]
+Old = '\b[0-9]{5}\b'
+New = "CUSTOM_NUMBER_REGEX"
+
+[[Repls]]
+# Fix path with reverse slashes in the output for Windows.
+Old = '\$TMPDIR\\subdir\\a\\b\\c'
+New = '$$TMPDIR/subdir/a/b/c'
@@ -2,16 +2,25 @@ package acceptance_test
 
 import (
     "net/http"
-    "github.com/databricks/cli/internal/testutil"
+    "testing"
+
+    "github.com/databricks/cli/libs/testserver"
     "github.com/databricks/databricks-sdk-go/service/catalog"
     "github.com/databricks/databricks-sdk-go/service/compute"
     "github.com/databricks/databricks-sdk-go/service/iam"
     "github.com/databricks/databricks-sdk-go/service/workspace"
 )
 
-func AddHandlers(server *testutil.Server) {
-    server.Handle("/api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
+func StartServer(t *testing.T) *testserver.Server {
+    server := testserver.New(t)
+    t.Cleanup(func() {
+        server.Close()
+    })
+    return server
+}
+
+func AddHandlers(server *testserver.Server) {
+    server.Handle("GET /api/2.0/policies/clusters/list", func(r *http.Request) (any, error) {
         return compute.ListPoliciesResponse{
             Policies: []compute.Policy{
                 {
@@ -26,7 +35,7 @@ func AddHandlers(server *testutil.Server) {
         }, nil
     })
 
-    server.Handle("/api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.0/instance-pools/list", func(r *http.Request) (any, error) {
         return compute.ListInstancePools{
             InstancePools: []compute.InstancePoolAndStats{
                 {
@@ -37,7 +46,7 @@ func AddHandlers(server *testutil.Server) {
         }, nil
     })
 
-    server.Handle("/api/2.1/clusters/list", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.1/clusters/list", func(r *http.Request) (any, error) {
         return compute.ListClustersResponse{
             Clusters: []compute.ClusterDetails{
                 {
@@ -52,13 +61,13 @@ func AddHandlers(server *testutil.Server) {
         }, nil
     })
 
-    server.Handle("/api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.0/preview/scim/v2/Me", func(r *http.Request) (any, error) {
         return iam.User{
             UserName: "tester@databricks.com",
         }, nil
     })
 
-    server.Handle("/api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.0/workspace/get-status", func(r *http.Request) (any, error) {
         return workspace.ObjectInfo{
             ObjectId:   1001,
             ObjectType: "DIRECTORY",
@@ -67,13 +76,13 @@ func AddHandlers(server *testutil.Server) {
         }, nil
     })
 
-    server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
         return catalog.MetastoreAssignment{
             DefaultCatalogName: "main",
         }, nil
     })
 
-    server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
+    server.Handle("GET /api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
         return workspace.WorkspaceObjectPermissions{
             ObjectId:   "1001",
             ObjectType: "DIRECTORY",
@@ -89,4 +98,8 @@ func AddHandlers(server *testutil.Server) {
             },
         }, nil
     })
+
+    server.Handle("POST /api/2.0/workspace/mkdirs", func(r *http.Request) (any, error) {
+        return "{}", nil
+    })
 }
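The `"GET "`/`"POST "` prefixes in the patterns above follow the method-scoped route syntax that `net/http.ServeMux` supports since Go 1.22; whether `testserver.Server` forwards patterns to a standard mux is an assumption here, but the standalone behavior looks like this:

```go
package main

import (
    "fmt"
    "log"
    "net/http"
)

func main() {
    mux := http.NewServeMux()
    // With Go 1.22+ patterns the method is part of the route: a POST to this
    // path gets 405 Method Not Allowed instead of reaching the handler.
    mux.HandleFunc("GET /api/2.0/preview/scim/v2/Me", func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprint(w, `{"userName": "tester@databricks.com"}`)
    })
    log.Fatal(http.ListenAndServe("localhost:8080", mux))
}
```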
@@ -0,0 +1,2 @@
+# If neither the test directory nor any of its parents has test.toml, this file serves as the fallback configuration.
+# The configurations are not merged across parents; the closest one is used fully.
@@ -1,29 +0,0 @@
-package apps
-
-import (
-    "context"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/libs/cmdio"
-    "github.com/databricks/cli/libs/diag"
-)
-
-type slowDeployMessage struct{}
-
-// TODO: needs to be removed when no_compute option becomes available in TF provider and used in DABs
-// See https://github.com/databricks/cli/pull/2144
-func (v *slowDeployMessage) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-    if len(b.Config.Resources.Apps) > 0 {
-        cmdio.LogString(ctx, "Note: Databricks apps included in this bundle may increase initial deployment time due to compute provisioning.")
-    }
-
-    return nil
-}
-
-func (v *slowDeployMessage) Name() string {
-    return "apps.SlowDeployMessage"
-}
-
-func SlowDeployMessage() bundle.Mutator {
-    return &slowDeployMessage{}
-}
@@ -72,6 +72,7 @@ type Bundle struct {
     // It can be initialized on demand after loading the configuration.
     clientOnce sync.Once
     client     *databricks.WorkspaceClient
+    clientErr  error
 
     // Files that are synced to the workspace.file_path
     Files []fileset.File
@@ -134,23 +135,25 @@ func TryLoad(ctx context.Context) (*Bundle, error) {
     return Load(ctx, root)
 }
 
-func (b *Bundle) InitializeWorkspaceClient() (*databricks.WorkspaceClient, error) {
-    client, err := b.Config.Workspace.Client()
+func (b *Bundle) WorkspaceClientE() (*databricks.WorkspaceClient, error) {
+    b.clientOnce.Do(func() {
+        var err error
+        b.client, err = b.Config.Workspace.Client()
     if err != nil {
-        return nil, fmt.Errorf("cannot resolve bundle auth configuration: %w", err)
+        b.clientErr = fmt.Errorf("cannot resolve bundle auth configuration: %w", err)
     }
-    return client, nil
+    })
+
+    return b.client, b.clientErr
 }
 
 func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient {
-    b.clientOnce.Do(func() {
-        var err error
-        b.client, err = b.InitializeWorkspaceClient()
+    client, err := b.WorkspaceClientE()
     if err != nil {
         panic(err)
     }
-    })
-    return b.client
+    return client
 }
 
 // SetWorkpaceClient sets the workspace client for this bundle.
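The refactor above is the usual `sync.Once` error-memoization pattern: resolve once, cache both the value and the error, and let the panicking variant delegate to the error-returning one. A minimal self-contained sketch with generic names (not the bundle types):

```go
package main

import (
    "errors"
    "fmt"
    "sync"
)

type lazyClient struct {
    once   sync.Once
    client *string // stand-in for the real client type
    err    error
}

// Get resolves the client exactly once; later callers observe the same
// (client, err) pair, including a cached failure. The resolver is never retried.
func (l *lazyClient) Get() (*string, error) {
    l.once.Do(func() {
        l.client, l.err = dial()
    })
    return l.client, l.err
}

func dial() (*string, error) {
    return nil, errors.New("cannot resolve auth configuration")
}

func main() {
    var l lazyClient
    _, err1 := l.Get()
    _, err2 := l.Get()
    fmt.Println(err1, err1 == err2) // same cached error instance both times
}
```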
@@ -8,9 +8,6 @@ type Git struct {
     // Path to bundle root relative to the git repository root.
     BundleRootPath string `json:"bundle_root_path,omitempty" bundle:"readonly"`
 
-    // Inferred is set to true if the Git details were inferred and weren't set explicitly
-    Inferred bool `json:"inferred,omitempty" bundle:"readonly"`
-
     // The actual branch according to Git (may be different from the configured branch)
     ActualBranch string `json:"actual_branch,omitempty" bundle:"readonly"`
 }
@@ -2,6 +2,7 @@ package loader
 
 import (
     "context"
+    "fmt"
     "path/filepath"
     "slices"
     "strings"
@@ -36,6 +37,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
     // Maintain list of files in order of files being loaded.
     // This is stored in the bundle configuration for observability.
     var files []string
+    var diags diag.Diagnostics
 
     // For each glob, find all files to load.
     // Ordering of the list of globs is maintained in the output.
@@ -60,7 +62,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 
     // Filter matches to ones we haven't seen yet.
     var includes []string
-    for _, match := range matches {
+    for i, match := range matches {
         rel, err := filepath.Rel(b.BundleRootPath, match)
         if err != nil {
             return diag.FromErr(err)
@@ -69,9 +71,22 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
             continue
         }
         seen[rel] = true
+        if filepath.Ext(rel) != ".yaml" && filepath.Ext(rel) != ".yml" {
+            diags = diags.Append(diag.Diagnostic{
+                Severity:  diag.Error,
+                Summary:   "Files in the 'include' configuration section must be YAML files.",
+                Detail:    fmt.Sprintf("The file %s in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.", rel),
+                Locations: b.Config.GetLocations(fmt.Sprintf("include[%d]", i)),
+            })
+            continue
+        }
         includes = append(includes, rel)
     }
 
+    if len(diags) > 0 {
+        return diags
+    }
+
     // Add matches to list of mutators to return.
     slices.Sort(includes)
     files = append(files, includes...)
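One detail worth keeping in mind when reading the extension check: `filepath.Ext` returns the suffix including the leading dot, so comparing against the literals `".yaml"`/`".yml"` is the right shape. A tiny illustration:

```go
package main

import (
    "fmt"
    "path/filepath"
)

func main() {
    for _, p := range []string{"resources/jobs.yml", "conf/app.yaml", "data/users.json"} {
        ext := filepath.Ext(p) // ".yml", ".yaml", ".json" (dot included)
        fmt.Printf("%-22s %-6s yaml=%v\n", p, ext, ext == ".yaml" || ext == ".yml")
    }
}
```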
@@ -1,26 +0,0 @@
-package mutator
-
-import (
-    "context"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/libs/diag"
-)
-
-type initializeWorkspaceClient struct{}
-
-func InitializeWorkspaceClient() bundle.Mutator {
-    return &initializeWorkspaceClient{}
-}
-
-func (m *initializeWorkspaceClient) Name() string {
-    return "InitializeWorkspaceClient"
-}
-
-// Apply initializes the workspace client for the bundle. We do this here so
-// downstream calls to b.WorkspaceClient() do not panic if there's an error in the
-// auth configuration.
-func (m *initializeWorkspaceClient) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnostics {
-    _, err := b.InitializeWorkspaceClient()
-    return diag.FromErr(err)
-}
@@ -40,7 +40,6 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn
     b.Config.Bundle.Git.ActualBranch = info.CurrentBranch
     if b.Config.Bundle.Git.Branch == "" {
         // Only load branch if there's no user defined value
-        b.Config.Bundle.Git.Inferred = true
         b.Config.Bundle.Git.Branch = info.CurrentBranch
     }
 
@@ -135,11 +135,6 @@ func findNonUserPath(b *bundle.Bundle) string {
 }
 
 func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) diag.Diagnostics {
-    if b.Config.Bundle.Git.Inferred {
-        env := b.Config.Bundle.Target
-        log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env)
-    }
-
     r := b.Config.Resources
     for i := range r.Pipelines {
         if r.Pipelines[i].Development {
@@ -4,6 +4,7 @@ import (
     "encoding/json"
     "fmt"
     "io"
+    pathlib "path"
     "path/filepath"
 
     "github.com/databricks/cli/libs/dyn"
@@ -99,7 +100,7 @@ func removeVirtualLocations(locations []dyn.Location) []dyn.Location {
 // parsePythonLocations parses locations.json from the Python mutator.
 //
 // locations file is newline-separated JSON objects with pythonLocationEntry structure.
-func parsePythonLocations(input io.Reader) (*pythonLocations, error) {
+func parsePythonLocations(bundleRoot string, input io.Reader) (*pythonLocations, error) {
     decoder := json.NewDecoder(input)
     locations := newPythonLocations()
 
@@ -116,6 +117,12 @@ func parsePythonLocations(input io.Reader) (*pythonLocations, error) {
             return nil, fmt.Errorf("failed to parse python location: %s", err)
         }
 
+        // Output can contain both relative paths and absolute paths outside of bundle root.
+        // The mutator pipeline expects all paths to be absolute at this point, so make all paths absolute.
+        if !pathlib.IsAbs(entry.File) {
+            entry.File = filepath.Join(bundleRoot, entry.File)
+        }
+
         location := dyn.Location{
             File: entry.File,
             Line: entry.Line,
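Note the deliberate mix of path packages in the change above: absoluteness is tested with the slash-only `path` package (aliased `pathlib`), matching the forward-slash paths the Python side emits, while the join uses the OS-aware `path/filepath`. A small illustrative sketch (paths invented):

```go
package main

import (
    "fmt"
    pathlib "path"
    "path/filepath"
)

func main() {
    bundleRoot := "/tmp/my_project"
    for _, file := range []string{"src/jobs.py", "/Shared/foo.py"} {
        // pathlib.IsAbs treats only a leading forward slash as absolute.
        if !pathlib.IsAbs(file) {
            file = filepath.Join(bundleRoot, file) // OS-specific separators on join
        }
        fmt.Println(file)
    }
}
```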
@@ -165,12 +165,28 @@ func TestLoadOutput(t *testing.T) {
     require.Equal(t, filepath.Join(bundleRoot, generatedFileName), notebookPath.Locations()[0].File)
 }
 
-func TestParsePythonLocations(t *testing.T) {
-    expected := dyn.Location{File: "foo.py", Line: 1, Column: 2}
+func TestParsePythonLocations_absolutePath(t *testing.T) {
+    // output can contain absolute path that is outside of the bundle root
+    expected := dyn.Location{File: "/Shared/foo.py", Line: 1, Column: 2}
 
-    input := `{"path": "foo", "file": "foo.py", "line": 1, "column": 2}`
+    input := `{"path": "foo", "file": "/Shared/foo.py", "line": 1, "column": 2}`
     reader := bytes.NewReader([]byte(input))
-    locations, err := parsePythonLocations(reader)
+    locations, err := parsePythonLocations("/tmp/", reader)
+
+    assert.NoError(t, err)
+
+    assert.True(t, locations.keys["foo"].exists)
+    assert.Equal(t, expected, locations.keys["foo"].location)
+}
+
+func TestParsePythonLocations_relativePath(t *testing.T) {
+    // output can contain relative paths, we expect all locations to be absolute
+    // at this stage of mutator pipeline
+    expected := dyn.Location{File: filepath.Clean("/tmp/my_project/foo.py"), Line: 1, Column: 2}
+
+    input := `{"path": "foo", "file": "foo.py", "line": 1, "column": 2}`
+    reader := bytes.NewReader([]byte(input))
+    locations, err := parsePythonLocations(filepath.Clean("/tmp/my_project"), reader)
 
     assert.NoError(t, err)
 
@@ -331,7 +331,7 @@ func (m *pythonMutator) runPythonMutator(ctx context.Context, root dyn.Value, op
         return dyn.InvalidValue, diag.Errorf("failed to load diagnostics: %s", pythonDiagnosticsErr)
     }
 
-    locations, err := loadLocationsFile(locationsPath)
+    locations, err := loadLocationsFile(opts.bundleRootPath, locationsPath)
     if err != nil {
         return dyn.InvalidValue, diag.Errorf("failed to load locations: %s", err)
     }
@@ -381,7 +381,7 @@ func writeInputFile(inputPath string, input dyn.Value) error {
 }
 
 // loadLocationsFile loads locations.json containing source locations for generated YAML.
-func loadLocationsFile(locationsPath string) (*pythonLocations, error) {
+func loadLocationsFile(bundleRoot, locationsPath string) (*pythonLocations, error) {
     locationsFile, err := os.Open(locationsPath)
     if errors.Is(err, fs.ErrNotExist) {
         return newPythonLocations(), nil
@@ -391,7 +391,7 @@ func loadLocationsFile(locationsPath string) (*pythonLocations, error) {
 
     defer locationsFile.Close()
 
-    return parsePythonLocations(locationsFile)
+    return parsePythonLocations(bundleRoot, locationsFile)
 }
 
 func loadOutputFile(rootPath, outputPath string, locations *pythonLocations) (dyn.Value, diag.Diagnostics) {
@@ -54,6 +54,8 @@ func TestPythonMutator_Name_applyMutators(t *testing.T) {
 func TestPythonMutator_loadResources(t *testing.T) {
     withFakeVEnv(t, ".venv")
+
+    rootPath := filepath.Join(t.TempDir(), "my_project")
 
     b := loadYaml("databricks.yml", `
 experimental:
   python:
@@ -64,6 +66,9 @@ func TestPythonMutator_loadResources(t *testing.T) {
       job0:
         name: job_0`)
 
+    // set rootPath so that we can make absolute paths in dyn.Location
+    b.BundleRootPath = rootPath
+
     ctx := withProcessStub(
         t,
         []string{
@@ -120,7 +125,7 @@ func TestPythonMutator_loadResources(t *testing.T) {
 
     assert.Equal(t, []dyn.Location{
         {
-            File:   "src/examples/job1.py",
+            File:   filepath.Join(rootPath, "src/examples/job1.py"),
             Line:   5,
             Column: 7,
         },
@@ -3,11 +3,14 @@ package mutator
 import (
     "context"
     "fmt"
+    "os"
+    "path/filepath"
 
     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config/variable"
     "github.com/databricks/cli/libs/diag"
     "github.com/databricks/cli/libs/dyn"
+    "github.com/databricks/cli/libs/dyn/jsonloader"
     "github.com/databricks/cli/libs/env"
 )
 
@@ -23,7 +26,11 @@ func (m *setVariables) Name() string {
     return "SetVariables"
 }
 
-func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string) (dyn.Value, error) {
+func getDefaultVariableFilePath(target string) string {
+    return ".databricks/bundle/" + target + "/variable-overrides.json"
+}
+
+func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, name string, fileDefault dyn.Value) (dyn.Value, error) {
     // case: variable already has value initialized, so skip
     if variable.HasValue() {
         return v, nil
@@ -49,6 +56,26 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
         return v, nil
     }
 
+    // case: Set the variable to the default value from the variable file
+    if fileDefault.Kind() != dyn.KindInvalid && fileDefault.Kind() != dyn.KindNil {
+        hasComplexType := variable.IsComplex()
+        hasComplexValue := fileDefault.Kind() == dyn.KindMap || fileDefault.Kind() == dyn.KindSequence
+
+        if hasComplexType && !hasComplexValue {
+            return dyn.InvalidValue, fmt.Errorf(`variable %s is of type complex, but the value in the variable file is not a complex type`, name)
+        }
+        if !hasComplexType && hasComplexValue {
+            return dyn.InvalidValue, fmt.Errorf(`variable %s is not of type complex, but the value in the variable file is a complex type`, name)
+        }
+
+        v, err := dyn.Set(v, "value", fileDefault)
+        if err != nil {
+            return dyn.InvalidValue, fmt.Errorf(`failed to assign default value from variable file to variable %s with error: %v`, name, err)
+        }
+
+        return v, nil
+    }
+
     // case: Set the variable to its default value
     if variable.HasDefault() {
         vDefault, err := dyn.Get(v, "default")
@@ -64,10 +91,43 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
     }
 
     // We should have had a value to set for the variable at this point.
-    return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
+    return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done using "--var", by setting the %s environment variable, or in %s file`, name, bundleVarPrefix+name, getDefaultVariableFilePath("<target>"))
+}
+
+func readVariablesFromFile(b *bundle.Bundle) (dyn.Value, diag.Diagnostics) {
+    var diags diag.Diagnostics
+
+    filePath := filepath.Join(b.BundleRootPath, getDefaultVariableFilePath(b.Config.Bundle.Target))
+    if _, err := os.Stat(filePath); err != nil {
+        return dyn.InvalidValue, nil
+    }
+
+    f, err := os.ReadFile(filePath)
+    if err != nil {
+        return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to read variables file: %w", err))
+    }
+
+    val, err := jsonloader.LoadJSON(f, filePath)
+    if err != nil {
+        return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to parse variables file %s: %w", filePath, err))
+    }
+
+    if val.Kind() != dyn.KindMap {
+        return dyn.InvalidValue, diags.Append(diag.Diagnostic{
+            Severity: diag.Error,
+            Summary:  fmt.Sprintf("failed to parse variables file %s: invalid format", filePath),
+            Detail:   "Variables file must be a JSON object with the following format:\n{\"var1\": \"value1\", \"var2\": \"value2\"}",
+        })
+    }
+
+    return val, nil
 }
 
 func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+    defaults, diags := readVariablesFromFile(b)
+    if diags.HasError() {
+        return diags
+    }
     err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
         return dyn.Map(v, "variables", dyn.Foreach(func(p dyn.Path, variable dyn.Value) (dyn.Value, error) {
             name := p[1].Key()
@@ -76,9 +136,10 @@ func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
             return dyn.InvalidValue, fmt.Errorf(`variable "%s" is not defined`, name)
         }
 
-        return setVariable(ctx, variable, v, name)
+        fileDefault, _ := dyn.Get(defaults, name)
+        return setVariable(ctx, variable, v, name, fileDefault)
     }))
     })
 
-    return diag.FromErr(err)
+    return diags.Extend(diag.FromErr(err))
 }
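To make the new lookup concrete: `readVariablesFromFile` expects a flat JSON object keyed by variable name, where complex variables take objects or arrays. A hedged sketch using the same internal helpers the diff relies on (`jsonloader.LoadJSON`, `dyn.Get`); the file contents and variable names here are invented:

```go
package main

import (
    "fmt"

    "github.com/databricks/cli/libs/dyn"
    "github.com/databricks/cli/libs/dyn/jsonloader"
)

func main() {
    // Hypothetical contents of .databricks/bundle/dev/variable-overrides.json.
    data := []byte(`{
        "a": "simple string override",
        "cluster": {"node_type_id": "i3.xlarge", "num_workers": 2}
    }`)

    val, err := jsonloader.LoadJSON(data, ".databricks/bundle/dev/variable-overrides.json")
    if err != nil {
        panic(err)
    }

    // The top level must be a map, mirroring the dyn.KindMap check above.
    fmt.Println(val.Kind() == dyn.KindMap)

    // Per-variable defaults are then picked out by name, as in Apply.
    a, _ := dyn.Get(val, "a")
    fmt.Println(a.AsAny())
}
```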
@@ -25,7 +25,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    v, err = setVariable(context.Background(), v, &variable, "foo")
+    v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
     require.NoError(t, err)
 
     err = convert.ToTyped(&variable, v)
@@ -43,7 +43,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    v, err = setVariable(context.Background(), v, &variable, "foo")
+    v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
     require.NoError(t, err)
 
     err = convert.ToTyped(&variable, v)
@@ -65,7 +65,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    v, err = setVariable(context.Background(), v, &variable, "foo")
+    v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
     require.NoError(t, err)
 
     err = convert.ToTyped(&variable, v)
@@ -90,7 +90,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    v, err = setVariable(context.Background(), v, &variable, "foo")
+    v, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
     require.NoError(t, err)
 
     err = convert.ToTyped(&variable, v)
@@ -107,8 +107,8 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    _, err = setVariable(context.Background(), v, &variable, "foo")
-    assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable")
+    _, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
+    assert.ErrorContains(t, err, "no value assigned to required variable foo. Assignment can be done using \"--var\", by setting the BUNDLE_VAR_foo environment variable, or in .databricks/bundle/<target>/variable-overrides.json file")
 }
 
 func TestSetVariablesMutator(t *testing.T) {
@@ -157,6 +157,6 @@ func TestSetComplexVariablesViaEnvVariablesIsNotAllowed(t *testing.T) {
     v, err := convert.FromTyped(variable, dyn.NilValue)
     require.NoError(t, err)
 
-    _, err = setVariable(context.Background(), v, &variable, "foo")
+    _, err = setVariable(context.Background(), v, &variable, "foo", dyn.NilValue)
     assert.ErrorContains(t, err, "setting via environment variables (BUNDLE_VAR_foo) is not supported for complex variable foo")
 }
@@ -36,11 +36,12 @@ type Variable struct {
     // This field stores the resolved value for the variable. The variable are
     // resolved in the following priority order (from highest to lowest)
     //
-    // 1. Command line flag. For example: `--var="foo=bar"`
-    // 2. Target variable. eg: BUNDLE_VAR_foo=bar
-    // 3. Default value as defined in the applicable environments block
-    // 4. Default value defined in variable definition
-    // 5. Throw error, since if no default value is defined, then the variable
+    // 1. Command line flag `--var="foo=bar"`
+    // 2. Environment variable. eg: BUNDLE_VAR_foo=bar
+    // 3. Load defaults from .databricks/bundle/<target>/variable-overrides.json
+    // 4. Default value as defined in the applicable targets block
+    // 5. Default value defined in variable definition
+    // 6. Throw error, since if no default value is defined, then the variable
     // is required
     Value VariableValue `json:"value,omitempty" bundle:"readonly"`
@@ -31,7 +31,6 @@ func TestComputeMetadataMutator(t *testing.T) {
                 OriginURL:      "www.host.com",
                 Commit:         "abcd",
                 BundleRootPath: "a/b/c/d",
-                Inferred:       true,
             },
         },
         Resources: config.Resources{
@@ -72,9 +71,6 @@ func TestComputeMetadataMutator(t *testing.T) {
                 OriginURL:      "www.host.com",
                 Commit:         "abcd",
                 BundleRootPath: "a/b/c/d",
-
-                // Test that this field doesn't carry over into the metadata.
-                Inferred: false,
             },
         },
         Resources: metadata.Resources{
@@ -108,6 +108,14 @@ var envCopy = []string{
     // Include $TF_CLI_CONFIG_FILE to override terraform provider in development.
     // See: https://developer.hashicorp.com/terraform/cli/config/config-file#explicit-installation-method-configuration
     "TF_CLI_CONFIG_FILE",
+
+    // Include $USE_SDK_V2_RESOURCES and $USE_SDK_V2_DATA_SOURCES; these are used to switch back from plugin framework to SDKv2.
+    // This is used for mitigating issues with resources migrated to the plugin framework, as recommended here:
+    // https://registry.terraform.io/providers/databricks/databricks/latest/docs/guides/troubleshooting#plugin-framework-migration-problems
+    // It is currently a workaround for deploying quality_monitors:
+    // https://github.com/databricks/terraform-provider-databricks/issues/4229#issuecomment-2520344690
+    "USE_SDK_V2_RESOURCES",
+    "USE_SDK_V2_DATA_SOURCES",
 }
 
 // This function inherits some environment variables for Terraform CLI.
@@ -38,6 +38,12 @@ func (appConverter) Convert(ctx context.Context, key string, vin dyn.Value, out
         return err
     }
 
+    // We always set no_compute to true as it allows DABs not to wait for app compute to be started when app is created.
+    vout, err = dyn.Set(vout, "no_compute", dyn.V(true))
+    if err != nil {
+        return err
+    }
+
     // Add the converted resource to the output.
     out.App[key] = vout.AsAny()
@@ -63,6 +63,7 @@ func TestConvertApp(t *testing.T) {
     assert.Equal(t, map[string]any{
         "description": "app description",
         "name":        "app_id",
+        "no_compute":  true,
         "resources": []any{
             map[string]any{
                 "name": "job1",
@@ -136,6 +137,7 @@ func TestConvertAppWithNoDescription(t *testing.T) {
     assert.Equal(t, map[string]any{
         "name":        "app_id",
         "description": "", // Due to Apps API always returning a description field, we set it in the output as well to avoid permanent TF drift
+        "no_compute":  true,
         "resources": []any{
             map[string]any{
                 "name": "job1",
@@ -0,0 +1,79 @@
+## docs-autogen
+
+1. Install [Golang](https://go.dev/doc/install)
+2. Run `make vendor docs` from the repo
+3. See generated documents in the `./bundle/docsgen/output` directory
+4. To change descriptions, update content in `./bundle/internal/schema/annotations.yml` or `./bundle/internal/schema/annotations_openapi_overrides.yml` and re-run `make docs`
+
+For simpler usage, run it together with a copy command to move the resulting files to a local `docs` repo. Note that it will overwrite any local changes in the affected files. Example:
+
+```
+make docs && cp bundle/docsgen/output/*.md ../docs/source/dev-tools/bundles
+```
+
+To change the intro sections for the files, update them in the `templates/` directory.
+
+### Annotation file structure
+
+```yaml
+"<root-type-name>":
+  "<property-name>":
+    description: Description of the property; only plain text is supported
+    markdown_description: Description with Markdown support; if defined, it overrides the value in docs and in the JSON schema
+    markdown_examples: Custom block for any example, in free form; Markdown is supported
+    title: JSON-schema title, not used in docs
+    default: Default value of the property, not used in docs
+    enum: Possible values of the enum type, not used in docs
+```
+
+Descriptions with the `PLACEHOLDER` value are not displayed in docs or in the JSON schema.
+
+All relative links like `[_](/dev-tools/bundles/settings.md#cluster_id)` are kept as-is in docs but converted to absolute links in the JSON schema.
+
+To change the description for a type itself (not its fields), use `"_"`:
+
+```yaml
+github.com/databricks/cli/bundle/config/resources.Cluster:
+  "_":
+    "markdown_description": |-
+      The cluster resource defines an [all-purpose cluster](/api/workspace/clusters/create).
+```
+
+### Example annotation
+
+```yaml
+github.com/databricks/cli/bundle/config.Bundle:
+  "cluster_id":
+    "description": |-
+      The ID of a cluster to use to run the bundle.
+    "markdown_description": |-
+      The ID of a cluster to use to run the bundle. See [_](/dev-tools/bundles/settings.md#cluster_id).
+  "compute_id":
+    "description": |-
+      PLACEHOLDER
+  "databricks_cli_version":
+    "description": |-
+      The Databricks CLI version to use for the bundle.
+    "markdown_description": |-
+      The Databricks CLI version to use for the bundle. See [_](/dev-tools/bundles/settings.md#databricks_cli_version).
+  "deployment":
+    "description": |-
+      The definition of the bundle deployment
+    "markdown_description": |-
+      The definition of the bundle deployment. For supported attributes, see [_](#deployment) and [_](/dev-tools/bundles/deployment-modes.md).
+  "git":
+    "description": |-
+      The Git version control details that are associated with your bundle.
+    "markdown_description": |-
+      The Git version control details that are associated with your bundle. For supported attributes, see [_](#git) and [_](/dev-tools/bundles/settings.md#git).
+  "name":
+    "description": |-
+      The name of the bundle.
+  "uuid":
+    "description": |-
+      PLACEHOLDER
+```
+
+### TODO
+
+Add a file watcher to track changes in the annotation files and re-run the `make docs` script automatically
@@ -0,0 +1,135 @@
+package main
+
+import (
+    "fmt"
+    "log"
+    "os"
+    "path"
+    "reflect"
+    "strings"
+
+    "github.com/databricks/cli/bundle/config"
+    "github.com/databricks/cli/bundle/internal/annotation"
+    "github.com/databricks/cli/libs/jsonschema"
+)
+
+const (
+    rootFileName      = "reference.md"
+    resourcesFileName = "resources.md"
+)
+
+func main() {
+    if len(os.Args) != 3 {
+        fmt.Println("Usage: go run main.go <annotation-file> <output-file>")
+        os.Exit(1)
+    }
+
+    annotationDir := os.Args[1]
+    docsDir := os.Args[2]
+    outputDir := path.Join(docsDir, "output")
+    templatesDir := path.Join(docsDir, "templates")
+
+    if _, err := os.Stat(outputDir); os.IsNotExist(err) {
+        if err := os.MkdirAll(outputDir, 0o755); err != nil {
+            log.Fatal(err)
+        }
+    }
+
+    rootHeader, err := os.ReadFile(path.Join(templatesDir, rootFileName))
+    if err != nil {
+        log.Fatal(err)
+    }
+    err = generateDocs(
+        []string{path.Join(annotationDir, "annotations.yml")},
+        path.Join(outputDir, rootFileName),
+        reflect.TypeOf(config.Root{}),
+        string(rootHeader),
+    )
+    if err != nil {
+        log.Fatal(err)
+    }
+    resourcesHeader, err := os.ReadFile(path.Join(templatesDir, resourcesFileName))
+    if err != nil {
+        log.Fatal(err)
+    }
+    err = generateDocs(
+        []string{path.Join(annotationDir, "annotations_openapi.yml"), path.Join(annotationDir, "annotations_openapi_overrides.yml"), path.Join(annotationDir, "annotations.yml")},
+        path.Join(outputDir, resourcesFileName),
+        reflect.TypeOf(config.Resources{}),
+        string(resourcesHeader),
+    )
+    if err != nil {
+        log.Fatal(err)
+    }
+}
+
+func generateDocs(inputPaths []string, outputPath string, rootType reflect.Type, header string) error {
+    annotations, err := annotation.LoadAndMerge(inputPaths)
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    // schemas is used to resolve references to schemas
+    schemas := map[string]*jsonschema.Schema{}
+    // ownFields is used to track fields that are defined in the annotation file and should be included in the docs page
+    ownFields := map[string]bool{}
+
+    s, err := jsonschema.FromType(rootType, []func(reflect.Type, jsonschema.Schema) jsonschema.Schema{
+        func(typ reflect.Type, s jsonschema.Schema) jsonschema.Schema {
+            _, isOwnField := annotations[jsonschema.TypePath(typ)]
+            if isOwnField {
+                ownFields[jsonschema.TypePath(typ)] = true
+            }
+
+            refPath := getPath(typ)
+            shouldHandle := strings.HasPrefix(refPath, "github.com")
+            if !shouldHandle {
+                schemas[jsonschema.TypePath(typ)] = &s
+                return s
+            }
+
+            a := annotations[refPath]
+            if a == nil {
+                a = map[string]annotation.Descriptor{}
+            }
+
+            rootTypeAnnotation, ok := a["_"]
+            if ok {
+                assignAnnotation(&s, rootTypeAnnotation)
+            }
+
+            for k, v := range s.Properties {
+                assignAnnotation(v, a[k])
+            }
+
+            schemas[jsonschema.TypePath(typ)] = &s
+            return s
+        },
+    })
+    if err != nil {
+        log.Fatal(err)
+    }
+
+    nodes := buildNodes(s, schemas, ownFields)
+    err = buildMarkdown(nodes, outputPath, header)
+    if err != nil {
+        log.Fatal(err)
+    }
+    return nil
+}
+
+func getPath(typ reflect.Type) string {
+    return typ.PkgPath() + "." + typ.Name()
+}
+
+func assignAnnotation(s *jsonschema.Schema, a annotation.Descriptor) {
+    if a.Description != "" && a.Description != annotation.Placeholder {
+        s.Description = a.Description
+    }
+    if a.MarkdownDescription != "" {
+        s.MarkdownDescription = a.MarkdownDescription
+    }
+    if a.MarkdownExamples != "" {
+        s.Examples = []any{a.MarkdownExamples}
+    }
+}
@@ -0,0 +1,99 @@
+package main
+
+import (
+    "fmt"
+    "log"
+    "os"
+    "strings"
+)
+
+func buildMarkdown(nodes []rootNode, outputFile, header string) error {
+    m := newMardownRenderer()
+    m = m.PlainText(header)
+    for _, node := range nodes {
+        m = m.LF()
+        if node.TopLevel {
+            m = m.H2(node.Title)
+        } else {
+            m = m.H3(node.Title)
+        }
+        m = m.LF()
+
+        if node.Type != "" {
+            m = m.PlainText(fmt.Sprintf("**`Type: %s`**", node.Type))
+            m = m.LF()
+        }
+        m = m.PlainText(node.Description)
+        m = m.LF()
+
+        if len(node.ObjectKeyAttributes) > 0 {
+            n := pickLastWord(node.Title)
+            n = removePluralForm(n)
+            m = m.CodeBlocks("yaml", fmt.Sprintf("%ss:\n  <%s-name>:\n    <%s-field-name>: <%s-field-value>", n, n, n, n))
+            m = m.LF()
+            m = buildAttributeTable(m, node.ObjectKeyAttributes)
+        } else if len(node.ArrayItemAttributes) > 0 {
+            m = m.LF()
+            m = buildAttributeTable(m, node.ArrayItemAttributes)
+        } else if len(node.Attributes) > 0 {
+            m = m.LF()
+            m = buildAttributeTable(m, node.Attributes)
+        }
+
+        if node.Example != "" {
+            m = m.LF()
+            m = m.PlainText("**Example**")
+            m = m.LF()
+            m = m.PlainText(node.Example)
+        }
+    }
+
+    f, err := os.Create(outputFile)
+    if err != nil {
+        log.Fatal(err)
+    }
+    _, err = f.WriteString(m.String())
+    if err != nil {
+        log.Fatal(err)
+    }
+    return f.Close()
+}
+
+func pickLastWord(s string) string {
+    words := strings.Split(s, ".")
+    return words[len(words)-1]
+}
+
+// Build a custom table which we use in Databricks website
+func buildAttributeTable(m *markdownRenderer, attributes []attributeNode) *markdownRenderer {
+    m = m.LF()
+    m = m.PlainText(".. list-table::")
+    m = m.PlainText("   :header-rows: 1")
+    m = m.LF()
+
+    m = m.PlainText("   * - Key")
+    m = m.PlainText("     - Type")
+    m = m.PlainText("     - Description")
+    m = m.LF()
+
+    for _, a := range attributes {
+        m = m.PlainText("   * - " + fmt.Sprintf("`%s`", a.Title))
+        m = m.PlainText("     - " + a.Type)
+        m = m.PlainText("     - " + formatDescription(a))
+        m = m.LF()
+    }
+    return m
+}
+
+func formatDescription(a attributeNode) string {
+    s := strings.ReplaceAll(a.Description, "\n", " ")
+    if a.Link != "" {
+        if strings.HasSuffix(s, ".") {
+            s += " "
+        } else if s != "" {
+            s += ". "
+        }
+        s += fmt.Sprintf("See [_](#%s).", a.Link)
+    }
+    return s
+}
@ -0,0 +1,228 @@
package main

import (
	"sort"
	"strings"

	"github.com/databricks/cli/libs/jsonschema"
)

// rootNode is an intermediate representation of a resolved JSON-schema item that is used to generate documentation.
// Every schema node follows this conversion: `JSON-schema -> rootNode -> markdown text`.
type rootNode struct {
	Title               string
	Description         string
	Attributes          []attributeNode
	Example             string
	ObjectKeyAttributes []attributeNode
	ArrayItemAttributes []attributeNode
	TopLevel            bool
	Type                string
}

type attributeNode struct {
	Title       string
	Type        string
	Description string
	Link        string
}

type rootProp struct {
	// k is the name of the property
	k string
	// v is the corresponding json-schema node
	v *jsonschema.Schema
	// topLevel is true only for direct properties of the schema of root type (e.g. config.Root or config.Resources)
	// Example: config.Root has .
	topLevel bool
	// circular indicates if the property was added by a recursive type, e.g. task.for_each_task.task.for_each_task
	// These entries don't expand further and don't add any new nodes from their properties
	circular bool
}

const MapType = "Map"

// buildNodes converts a JSON-schema to a flat list of rootNode items that are then used to generate markdown documentation.
// It recursively traverses the schema, expanding the resulting list with new items for every property of nodes of `object` and `array` type.
func buildNodes(s jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootNode {
	rootProps := []rootProp{}
	for k, v := range s.Properties {
		rootProps = append(rootProps, rootProp{k, v, true, false})
	}
	nodes := make([]rootNode, 0, len(rootProps))
	visited := make(map[string]bool)

	for i := 0; i < len(rootProps); i++ {
		item := rootProps[i]
		k := item.k
		v := item.v

		if visited[k] {
			continue
		}
		visited[k] = true

		v = resolveRefs(v, refs)
		node := rootNode{
			Title:       k,
			Description: getDescription(v, item.topLevel),
			TopLevel:    item.topLevel,
			Example:     getExample(v),
			Type:        getHumanReadableType(v.Type),
		}

		hasProperties := len(v.Properties) > 0
		if hasProperties {
			node.Attributes = getAttributes(v.Properties, refs, ownFields, k, item.circular)
		}

		mapValueType := getMapValueType(v, refs)
		if mapValueType != nil {
			d := getDescription(mapValueType, true)
			if d != "" {
				node.Description = d
			}
			if node.Example == "" {
				node.Example = getExample(mapValueType)
			}
			node.ObjectKeyAttributes = getAttributes(mapValueType.Properties, refs, ownFields, getMapKeyPrefix(k), item.circular)
		}

		arrayItemType := resolveRefs(v.Items, refs)
		if arrayItemType != nil {
			node.ArrayItemAttributes = getAttributes(arrayItemType.Properties, refs, ownFields, k, item.circular)
		}

		nodes = append(nodes, node)

		// Whether we should add new root props from the children of the current JSON-schema node to include their definitions in this document
		shouldAddNewProps := !item.circular
		if shouldAddNewProps {
			newProps := []rootProp{}
			// Add nodes with definitions for the properties. Example:
			// bundle:
			//   prop-name: <value>
			if hasProperties {
				newProps = append(newProps, extractNodes(k, v.Properties, refs, ownFields)...)
			}

			// Add a node with the definition for the type of the array item. Example:
			// permissions:
			//   - <item>
			if arrayItemType != nil {
				newProps = append(newProps, extractNodes(k, arrayItemType.Properties, refs, ownFields)...)
			}
			// Add a node with the definition for the type of the map value. Example:
			// targets:
			//   <key>: <value>
			if mapValueType != nil {
				newProps = append(newProps, extractNodes(getMapKeyPrefix(k), mapValueType.Properties, refs, ownFields)...)
			}

			rootProps = append(rootProps, newProps...)
		}
	}

	sort.Slice(nodes, func(i, j int) bool {
		return nodes[i].Title < nodes[j].Title
	})
	return nodes
}
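
// Illustrative usage (not part of the original change): a minimal schema with one
// top-level object flattens into a single node whose scalar fields become attributes.
//
//	s := jsonschema.Schema{
//		Type: "object",
//		Properties: map[string]*jsonschema.Schema{
//			"bundle": {Type: "object", Properties: map[string]*jsonschema.Schema{
//				"name": {Type: "string"},
//			}},
//		},
//	}
//	nodes := buildNodes(s, nil, nil)
//	// => []rootNode{{Title: "bundle", TopLevel: true, Type: "Map",
//	//      Attributes: []attributeNode{{Title: "name", Type: "String"}}}}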

func getMapValueType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema) *jsonschema.Schema {
	additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
	if ok {
		return resolveRefs(additionalProps, refs)
	}
	return nil
}

func getMapKeyPrefix(s string) string {
	return s + ".<name>"
}

func removePluralForm(s string) string {
	if strings.HasSuffix(s, "s") {
		return strings.TrimSuffix(s, "s")
	}
	return s
}

func getHumanReadableType(t jsonschema.Type) string {
	typesMapping := map[string]string{
		"string":  "String",
		"integer": "Integer",
		"boolean": "Boolean",
		"array":   "Sequence",
		"object":  "Map",
	}
	return typesMapping[string(t)]
}

func getAttributes(props, refs map[string]*jsonschema.Schema, ownFields map[string]bool, prefix string, circular bool) []attributeNode {
	attributes := []attributeNode{}
	for k, v := range props {
		v = resolveRefs(v, refs)
		typeString := getHumanReadableType(v.Type)
		if typeString == "" {
			typeString = "Any"
		}
		var reference string
		if isReferenceType(v, refs, ownFields) && !circular {
			reference = prefix + "." + k
		}
		attributes = append(attributes, attributeNode{
			Title:       k,
			Type:        typeString,
			Description: getDescription(v, true),
			Link:        reference,
		})
	}
	sort.Slice(attributes, func(i, j int) bool {
		return attributes[i].Title < attributes[j].Title
	})
	return attributes
}

func getDescription(s *jsonschema.Schema, allowMarkdown bool) string {
	if allowMarkdown && s.MarkdownDescription != "" {
		return s.MarkdownDescription
	}
	return s.Description
}

func shouldExtract(ref string, ownFields map[string]bool) bool {
	if i := strings.Index(ref, "github.com"); i >= 0 {
		ref = ref[i:]
	}
	_, isCustomField := ownFields[ref]
	return isCustomField
}

// extractNodes returns a list of rootProp items for all properties of the json-schema node that should be extracted based on context.
// E.g. we extract all properties that are objects or arrays, skipping references to types that are not the CLI's own (see shouldExtract).
func extractNodes(prefix string, props, refs map[string]*jsonschema.Schema, ownFields map[string]bool) []rootProp {
	nodes := []rootProp{}
	for k, v := range props {
		if v.Reference != nil && !shouldExtract(*v.Reference, ownFields) {
			continue
		}
		v = resolveRefs(v, refs)
		if v.Type == "object" || v.Type == "array" {
			nodes = append(nodes, rootProp{prefix + "." + k, v, false, isCycleField(k)})
		}
	}
	return nodes
}
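
// For illustration (not part of the original change): given a props map with one nested
// object, extractNodes queues it for its own documentation section under a dotted title.
//
//	props := map[string]*jsonschema.Schema{
//		"notebook_task": {Type: "object"},
//	}
//	extractNodes("tasks", props, nil, nil)
//	// => []rootProp{{k: "tasks.notebook_task", v: ..., topLevel: false, circular: false}}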

func isCycleField(field string) bool {
	return field == "for_each_task"
}

func getExample(v *jsonschema.Schema) string {
	examples := v.Examples
	if len(examples) == 0 {
		return ""
	}
	// Examples in the bundle schema are expected to hold strings; the type assertion
	// panics if a non-string example is ever added.
	return examples[0].(string)
}

@@ -0,0 +1,120 @@
package main

import (
	"testing"

	"github.com/databricks/cli/libs/jsonschema"
	"github.com/stretchr/testify/assert"
)

func TestBuildNodes_ChildExpansion(t *testing.T) {
	tests := []struct {
		name      string
		schema    jsonschema.Schema
		refs      map[string]*jsonschema.Schema
		ownFields map[string]bool
		wantNodes []rootNode
	}{
		{
			name: "array expansion",
			schema: jsonschema.Schema{
				Type: "object",
				Properties: map[string]*jsonschema.Schema{
					"list": {
						Type: "array",
						Items: &jsonschema.Schema{
							Type: "object",
							Properties: map[string]*jsonschema.Schema{
								"listSub": {Reference: strPtr("#/$defs/github.com/listSub")},
							},
						},
					},
				},
			},
			refs: map[string]*jsonschema.Schema{
				"github.com/listSub": {Type: "array", Items: &jsonschema.Schema{Type: "object", Properties: map[string]*jsonschema.Schema{"subField": {Type: "string"}}}},
			},
			ownFields: map[string]bool{"github.com/listSub": true},
			wantNodes: []rootNode{
				{
					Title:    "list",
					TopLevel: true,
					Type:     "Sequence",
					ArrayItemAttributes: []attributeNode{
						{Title: "listSub", Type: "Sequence", Link: "list.listSub"},
					},
				},
				{
					Title: "list.listSub",
					Type:  "Sequence",
					ArrayItemAttributes: []attributeNode{
						{Title: "subField", Type: "String"},
					},
				},
			},
		},
		{
			name: "map expansion",
			schema: jsonschema.Schema{
				Type: "object",
				Properties: map[string]*jsonschema.Schema{
					"myMap": {
						Type: "object",
						AdditionalProperties: &jsonschema.Schema{
							Reference: strPtr("#/$defs/github.com/myMap"),
							Properties: map[string]*jsonschema.Schema{
								"mapSub": {Type: "object", Reference: strPtr("#/$defs/github.com/mapSub")},
							},
						},
					},
				},
			},
			refs: map[string]*jsonschema.Schema{
				"github.com/myMap": {
					Type: "object",
					Properties: map[string]*jsonschema.Schema{
						"mapSub": {Type: "boolean", Reference: strPtr("#/$defs/github.com/mapSub")},
					},
				},
				"github.com/mapSub": {
					Type: "object",
					Properties: map[string]*jsonschema.Schema{
						"deepSub": {Type: "boolean"},
					},
				},
			},
			ownFields: map[string]bool{
				"github.com/myMap":  true,
				"github.com/mapSub": true,
			},
			wantNodes: []rootNode{
				{
					Title:    "myMap",
					TopLevel: true,
					Type:     "Map",
					ObjectKeyAttributes: []attributeNode{
						{Title: "mapSub", Type: "Map", Link: "myMap.<name>.mapSub"},
					},
				},
				{
					Title: "myMap.<name>.mapSub",
					Type:  "Map",
					Attributes: []attributeNode{
						{Title: "deepSub", Type: "Boolean"},
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := buildNodes(tt.schema, tt.refs, tt.ownFields)
			assert.Equal(t, tt.wantNodes, got)
		})
	}
}

func strPtr(s string) *string {
	return &s
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,97 @@
package main

import (
	"log"
	"strings"

	"github.com/databricks/cli/libs/jsonschema"
)

func isReferenceType(v *jsonschema.Schema, refs map[string]*jsonschema.Schema, ownFields map[string]bool) bool {
	if v.Type != "object" && v.Type != "array" {
		return false
	}
	if len(v.Properties) > 0 {
		return true
	}
	if v.Items != nil {
		items := resolveRefs(v.Items, refs)
		if items != nil && items.Type == "object" {
			return true
		}
	}
	props := resolveAdditionalProperties(v)
	if !isInOwnFields(props, ownFields) {
		return false
	}
	if props != nil {
		propsResolved := resolveRefs(props, refs)
		return propsResolved.Type == "object"
	}

	return false
}
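
// For illustration (not part of the original change): scalars never link to their own
// section, while an object with properties does.
//
//	isReferenceType(&jsonschema.Schema{Type: "string"}, nil, nil) // false
//	isReferenceType(&jsonschema.Schema{
//		Type:       "object",
//		Properties: map[string]*jsonschema.Schema{"a": {Type: "string"}},
//	}, nil, nil) // true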

func isInOwnFields(node *jsonschema.Schema, ownFields map[string]bool) bool {
	if node != nil && node.Reference != nil {
		return ownFields[getRefType(node)]
	}
	return true
}

func resolveAdditionalProperties(v *jsonschema.Schema) *jsonschema.Schema {
	if v.AdditionalProperties == nil {
		return nil
	}
	additionalProps, ok := v.AdditionalProperties.(*jsonschema.Schema)
	if !ok {
		return nil
	}
	return additionalProps
}

func resolveRefs(s *jsonschema.Schema, schemas map[string]*jsonschema.Schema) *jsonschema.Schema {
	if s == nil {
		return nil
	}

	node := s
	description := s.Description
	markdownDescription := s.MarkdownDescription
	examples := s.Examples

	for node.Reference != nil {
		ref := getRefType(node)
		newNode, ok := schemas[ref]
		if !ok {
			log.Printf("schema %s not found", ref)
			break
		}

		if description == "" {
			description = newNode.Description
		}
		if markdownDescription == "" {
			markdownDescription = newNode.MarkdownDescription
		}
		if len(examples) == 0 {
			examples = newNode.Examples
		}

		node = newNode
	}

	newNode := *node
	newNode.Description = description
	newNode.MarkdownDescription = markdownDescription
	newNode.Examples = examples

	return &newNode
}
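
// Illustrative example (not part of the original change; strPtr is the test helper above):
// references are followed through the "#/$defs/" prefix, and the outermost non-empty
// description, markdown description, and examples win.
//
//	refs := map[string]*jsonschema.Schema{
//		"github.com/job": {Type: "object", Description: "A job."},
//	}
//	s := &jsonschema.Schema{Reference: strPtr("#/$defs/github.com/job")}
//	resolved := resolveRefs(s, refs)
//	// resolved.Type == "object" && resolved.Description == "A job."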

func getRefType(node *jsonschema.Schema) string {
	if node.Reference == nil {
		return ""
	}
	return strings.TrimPrefix(*node.Reference, "#/$defs/")
}

@@ -0,0 +1,51 @@
package main

import (
	"fmt"
	"runtime"
	"strings"
)

type markdownRenderer struct {
	nodes []string
}

func newMardownRenderer() *markdownRenderer {
	return &markdownRenderer{}
}

func (m *markdownRenderer) add(s string) *markdownRenderer {
	m.nodes = append(m.nodes, s)
	return m
}

func (m *markdownRenderer) PlainText(s string) *markdownRenderer {
	return m.add(s)
}

func (m *markdownRenderer) LF() *markdownRenderer {
	return m.add(" ")
}

func (m *markdownRenderer) H2(s string) *markdownRenderer {
	return m.add("## " + s)
}

func (m *markdownRenderer) H3(s string) *markdownRenderer {
	return m.add("### " + s)
}

func (m *markdownRenderer) CodeBlocks(lang, s string) *markdownRenderer {
	return m.add(fmt.Sprintf("```%s%s%s%s```", lang, lineFeed(), s, lineFeed()))
}

func (m *markdownRenderer) String() string {
	return strings.Join(m.nodes, lineFeed())
}

func lineFeed() string {
	if runtime.GOOS == "windows" {
		return "\r\n"
	}
	return "\n"
}
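
// Illustrative usage (not part of the original change): calls chain, and String joins
// the accumulated nodes with the platform-specific line feed.
//
//	out := newMardownRenderer().
//		H2("Configuration reference").
//		LF().
//		PlainText("This article provides reference documentation.").
//		String()
//	// out == "## Configuration reference\n \nThis article provides reference documentation."
//	// (with "\r\n" separators on Windows)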

@@ -0,0 +1,10 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Configuration reference for databricks.yml
---

# Configuration reference

This article provides reference documentation for the keys supported by <DABS> configuration (YAML). See [_](/dev-tools/bundles/index.md).

For complete bundle examples, see [_](/dev-tools/bundles/resource-examples.md) and the [bundle-examples GitHub repository](https://github.com/databricks/bundle-examples).

@@ -0,0 +1,70 @@
<!-- DO NOT EDIT. This file is autogenerated with https://github.com/databricks/cli -->
---
description: Learn about resources supported by Databricks Asset Bundles and how to configure them.
---

# <DABS> resources

<DABS> allows you to specify information about the <Databricks> resources used by the bundle in the `resources` mapping in the bundle configuration. See [resources mapping](/dev-tools/bundles/settings.md#resources) and [resources key reference](/dev-tools/bundles/reference.md#resources).

This article outlines supported resource types for bundles and provides details and an example for each supported type. For additional examples, see [_](/dev-tools/bundles/resource-examples.md).

## <a id="resource-types"></a> Supported resources

The following table lists supported resource types for bundles. Some resources can be created by defining them in a bundle and deploying the bundle, while others only support referencing an existing resource to include in the bundle.

Resources are defined using the corresponding [Databricks REST API](/api/workspace/introduction) object's create operation request payload, where the object's supported fields, expressed as YAML, are the resource's supported properties. Links to the documentation for each resource's corresponding payload are listed in the table.

.. tip:: The `databricks bundle validate` command returns warnings if unknown resource properties are found in bundle configuration files.

.. list-table::
   :header-rows: 1

   * - Resource
     - Create support
     - Corresponding REST API object

   * - [cluster](#cluster)
     - ✓
     - [Cluster object](/api/workspace/clusters/create)

   * - [dashboard](#dashboard)
     -
     - [Dashboard object](/api/workspace/lakeview/create)

   * - [experiment](#experiment)
     - ✓
     - [Experiment object](/api/workspace/experiments/createexperiment)

   * - [job](#job)
     - ✓
     - [Job object](/api/workspace/jobs/create)

   * - [model (legacy)](#model-legacy)
     - ✓
     - [Model (legacy) object](/api/workspace/modelregistry/createmodel)

   * - [model_serving_endpoint](#model-serving-endpoint)
     - ✓
     - [Model serving endpoint object](/api/workspace/servingendpoints/create)

   * - [pipeline](#pipeline)
     - ✓
     - [Pipeline object](/api/workspace/pipelines/create)

   * - [quality_monitor](#quality-monitor)
     - ✓
     - [Quality monitor object](/api/workspace/qualitymonitors/create)

   * - [registered_model](#registered-model) (<UC>)
     - ✓
     - [Registered model object](/api/workspace/registeredmodels/create)

   * - [schema](#schema) (<UC>)
     - ✓
     - [Schema object](/api/workspace/schemas/create)

   * - [volume](#volume) (<UC>)
     - ✓
     - [Volume object](/api/workspace/volumes/create)
Some files were not shown because too many files have changed in this diff