Merge branch 'main' into feat/offlineinstall

commit 17f4629f37
Author: hari-selvarajan_data
Date: 2025-02-19 23:11:12 +00:00
958 changed files with 31736 additions and 5656 deletions

@@ -1 +1 @@
a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
c72c58f97b950fcb924a90ef164bcb10cfcd5ece

@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
{{- end }}
)
{{- $excludeFromJson := list "http-request"}}
func new{{.PascalName}}() *cobra.Command {
cmd := &cobra.Command{}
{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
{{- if .Request}}
var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
{{- if .RequestBodyField }}
{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
{{- end }}
{{- if .CanUseJson}}
{{- if $canUseJson}}
var {{.CamelName}}Json flags.JsonFlag
{{- end}}
{{- end}}
@@ -135,14 +138,14 @@ func new{{.PascalName}}() *cobra.Command {
{{- $request = .RequestBodyField.Entity -}}
{{- end -}}
{{if $request }}// TODO: short flags
{{- if .CanUseJson}}
{{- if $canUseJson}}
cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
{{- end}}
{{$method := .}}
{{ if not .IsJsonOnly }}
{{range $request.Fields -}}
{{range .AllFields -}}
{{- if not .Required -}}
{{if .Entity.IsObject }}// TODO: complex arg: {{.Name}}
{{if .Entity.IsObject}}{{if not (eq . $method.RequestBodyField) }}// TODO: complex arg: {{.Name}}{{end}}
{{else if .Entity.IsAny }}// TODO: any: {{.Name}}
{{else if .Entity.ArrayValue }}// TODO: array: {{.Name}}
{{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}}
@@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command {
{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
{{- $atleastOneArgumentWithDescription := false -}}
@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
ctx := cmd.Context()
{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
{{- if .Request }}
{{ if .CanUseJson }}
{{ if $canUseJson }}
if cmd.Flags().Changed("json") {
diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
if diags.HasError() {
@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
}{{- end}}
{{- if $hasPosArgs }}
{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
{{- end}}
{{- if $hasIdPrompt}}
if len(args) == 0 {
@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {
{{$method := .}}
{{- range $arg, $field := .RequiredPositionalArguments}}
{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
{{- end -}}
{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
}
{{- end}}
@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
{{- $method := .Method -}}
{{- $arg := .Arg -}}
{{- $hasIdPrompt := .HasIdPrompt -}}
{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
{{- if $optionalIfJsonIsUsed }}
if !cmd.Flags().Changed("json") {
{{- end }}
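For readers unfamiliar with the templating above: the change introduces $excludeFromJson so that commands whose kebab-case name is on that list (currently only http-request) never get the --json flag, even when the API spec marks them as JSON-capable. A minimal standalone sketch of that gate, assuming list/in helpers that build a slice and test membership (illustrative only, not part of this commit):

package main

import (
	"os"
	"slices"
	"text/template"
)

func main() {
	// "list" and "in" mimic the helpers the generator templates rely on.
	funcs := template.FuncMap{
		"list": func(items ...string) []string { return items },
		"in":   func(xs []string, s string) bool { return slices.Contains(xs, s) },
	}
	tmpl := template.Must(template.New("cmd").Funcs(funcs).Parse(
		`{{- $excludeFromJson := list "http-request" -}}
{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName)) -}}
{{.KebabName}}: canUseJson={{$canUseJson}}
`))
	cmds := []struct {
		KebabName  string
		CanUseJson bool
	}{
		{"create", true},
		{"http-request", true}, // excluded even though CanUseJson is true
	}
	for _, c := range cmds {
		if err := tmpl.Execute(os.Stdout, c); err != nil {
			panic(err)
		}
	}
	// Output:
	// create: canUseJson=true
	// http-request: canUseJson=false
}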

.gitattributes
@@ -1,11 +1,13 @@
cmd/account/access-control/access-control.go linguist-generated=true
cmd/account/billable-usage/billable-usage.go linguist-generated=true
cmd/account/budget-policy/budget-policy.go linguist-generated=true
cmd/account/budgets/budgets.go linguist-generated=true
cmd/account/cmd.go linguist-generated=true
cmd/account/credentials/credentials.go linguist-generated=true
cmd/account/csp-enablement-account/csp-enablement-account.go linguist-generated=true
cmd/account/custom-app-integration/custom-app-integration.go linguist-generated=true
cmd/account/disable-legacy-features/disable-legacy-features.go linguist-generated=true
cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=true
cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
cmd/account/federation-policy/federation-policy.go linguist-generated=true
@@ -31,6 +33,7 @@ cmd/account/users/users.go linguist-generated=true
cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
cmd/account/workspaces/workspaces.go linguist-generated=true
cmd/workspace/access-control/access-control.go linguist-generated=true
cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
@@ -74,6 +77,7 @@ cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
cmd/workspace/jobs/jobs.go linguist-generated=true
cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true
cmd/workspace/lakeview/lakeview.go linguist-generated=true
cmd/workspace/libraries/libraries.go linguist-generated=true
cmd/workspace/metastores/metastores.go linguist-generated=true
@@ -98,11 +102,13 @@ cmd/workspace/providers/providers.go linguist-generated=true
cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
cmd/workspace/queries/queries.go linguist-generated=true
cmd/workspace/query-execution/query-execution.go linguist-generated=true
cmd/workspace/query-history/query-history.go linguist-generated=true
cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
cmd/workspace/recipients/recipients.go linguist-generated=true
cmd/workspace/redash-config/redash-config.go linguist-generated=true
cmd/workspace/registered-models/registered-models.go linguist-generated=true
cmd/workspace/repos/repos.go linguist-generated=true
cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true

.github/CODEOWNERS
@@ -0,0 +1,2 @@
* @pietern @andrewnester @shreyas-goenka @denik
cmd/labs @alexott @nfx

@@ -4,3 +4,7 @@ updates:
directory: "/"
schedule:
interval: "weekly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"

@@ -0,0 +1,38 @@
name: "Close Stale Issues"
on:
workflow_dispatch:
schedule:
- cron: "0 0 * * *" # Run at midnight every day
jobs:
cleanup:
name: Stale issue job
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
permissions:
issues: write
contents: read
pull-requests: write
steps:
- uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
with:
stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.
# These labels are required
stale-issue-label: Stale
stale-pr-label: Stale
exempt-issue-labels: No Autoclose
exempt-pr-labels: No Autoclose
# Issue timing
days-before-stale: 60
days-before-close: 30
repo-token: ${{ secrets.GITHUB_TOKEN }}
loglevel: DEBUG

@@ -13,12 +13,19 @@ on:
jobs:
comment-on-pr:
runs-on: ubuntu-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
permissions:
pull-requests: write
# Only run this job for PRs from forks.
# Integration tests are not run automatically for PRs from forks.
if: "${{ github.event.pull_request.head.repo.fork }}"
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Delete old comments
env:
@@ -43,7 +50,7 @@ jobs:
run: |
gh pr comment ${{ github.event.pull_request.number }} --body \
"<!-- INTEGRATION_TESTS_MANUAL -->
If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
An authorized user can trigger integration tests manually by following the instructions below:
Trigger:
[go/deco-tests-run/cli](https://go/deco-tests-run/cli)

@@ -17,7 +17,9 @@ jobs:
# * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
#
trigger:
runs-on: ubuntu-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
steps:
- name: Auto-approve squashed commit

@@ -11,13 +11,16 @@ jobs:
# This workflow triggers the integration test workflow in a different repository.
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
trigger:
runs-on: ubuntu-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
environment: "test-trigger-is"
steps:
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@v1
uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
with:
app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -5,41 +5,25 @@ on:
types: [opened, synchronize]
jobs:
check-token:
runs-on: ubuntu-latest
environment: "test-trigger-is"
outputs:
has_token: ${{ steps.set-token-status.outputs.has_token }}
steps:
- name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
id: set-token-status
run: |
if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
echo "::set-output name=has_token::false"
else
echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
echo "::set-output name=has_token::true"
fi
# Trigger for pull requests.
#
# This workflow triggers the integration test workflow in a different repository.
# It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
# It depends on the "check-token" workflow to confirm access to this environment to avoid failures.
trigger:
runs-on: ubuntu-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
environment: "test-trigger-is"
if: needs.check-token.outputs.has_token == 'true'
needs: check-token
# Only run this job for PRs from branches on the main repository and not from forks.
# Workflows triggered by PRs from forks don't have access to the "test-trigger-is" environment.
if: "${{ !github.event.pull_request.head.repo.fork }}"
steps:
- name: Generate GitHub App Token
id: generate-token
uses: actions/create-github-app-token@v1
uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
with:
app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -2,15 +2,73 @@ name: publish-winget
on:
workflow_dispatch:
inputs:
tag:
description: 'Tag to publish'
default: ''
jobs:
publish-to-winget-pkgs:
runs-on: windows-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
environment: release
steps:
- uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # https://github.com/vedantmgoyal2009/winget-releaser/releases/tag/v2
with:
identifier: Databricks.DatabricksCLI
installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
fork-user: eng-dev-ecosystem-bot
- name: Checkout repository and submodules
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
# When updating the version of komac, make sure to update the checksum in the next step.
# Find both at https://github.com/russellbanks/Komac/releases.
- name: Download komac binary
run: |
curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
- name: Verify komac binary
run: |
echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
- name: Untar komac binary to temporary path
run: |
mkdir -p $RUNNER_TEMP/komac
tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
- name: Add komac to PATH
run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
- name: Confirm komac version
run: komac --version
# Use the tag from the input, or the ref name if the input is not provided.
# The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
- name: Strip "v" prefix from version
id: strip_version
run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
- name: Get URLs of signed Windows binaries
id: get_windows_urls
run: |
urls=$(
gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
jq -r .assets[].browser_download_url | \
grep -E '_windows_.*-signed\.zip$' | \
tr '\n' ' '
)
if [ -z "$urls" ]; then
echo "No signed Windows binaries found" >&2
exit 1
fi
echo "urls=$urls" >> "$GITHUB_OUTPUT"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Publish to Winget
run: |
komac update Databricks.DatabricksCLI \
--version ${{ steps.strip_version.outputs.version }} \
--submit \
--urls ${{ steps.get_windows_urls.outputs.urls }} \
env:
KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}

@@ -13,9 +13,26 @@ on:
# seed the build cache.
branches:
- main
schedule:
- cron: '0 0,12 * * *' # Runs at 00:00 and 12:00 UTC daily
env:
GOTESTSUM_FORMAT: github-actions
jobs:
cleanups:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
steps:
- name: Clean up cache if running on schedule
if: ${{ github.event_name == 'schedule' }}
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: gh cache delete --all --repo databricks/cli || true
tests:
needs: cleanups
runs-on: ${{ matrix.os }}
strategy:
@@ -28,20 +45,20 @@ jobs:
steps:
- name: Checkout repository and submodules
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
- name: Setup Python
uses: actions/setup-python@v5
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
with:
python-version: '3.9'
- name: Install uv
uses: astral-sh/setup-uv@v4
uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0
- name: Set go env
run: |
@@ -54,17 +71,22 @@
make vendor
pip3 install wheel
- name: Run tests
run: make testonly
- name: Run tests with coverage
run: make cover
golangci:
linters:
needs: cleanups
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# Use different schema from regular job, to avoid overwriting the same key
cache-dependency-path: |
go.sum
.golangci.yaml
- name: Run go mod tidy
run: |
go mod tidy
@@ -73,22 +95,39 @@ jobs:
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@v6
uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
with:
version: v1.62.2
version: v1.63.4
args: --timeout=15m
- name: Run ruff
uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
with:
version: "0.9.1"
args: "format --check"
validate-bundle-schema:
needs: cleanups
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# Use different schema from regular job, to avoid overwriting the same key
cache-dependency-path: |
go.sum
bundle/internal/schema/*.*
- name: Verify that the schema is up to date
run: |
if ! ( make schema && git diff --exit-code ); then
echo "The schema is not up to date. Please run 'make schema' and commit the changes."
exit 1
fi
# Github repo: https://github.com/ajv-validator/ajv-cli
- name: Install ajv-cli

@@ -20,18 +20,21 @@ on:
jobs:
goreleaser:
runs-on: ubuntu-latest
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
steps:
- name: Checkout repository and submodules
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
fetch-tags: true
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action
@@ -45,27 +48,27 @@ jobs:
- name: Run GoReleaser
id: releaser
uses: goreleaser/goreleaser-action@v6
uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
with:
version: ~> v2
args: release --snapshot --skip docker
- name: Upload macOS binaries
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: cli_darwin_snapshot
path: |
dist/*_darwin_*/
- name: Upload Linux binaries
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: cli_linux_snapshot
path: |
dist/*_linux_*/
- name: Upload Windows binaries
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: cli_windows_snapshot
path: |
@@ -85,7 +88,7 @@ jobs:
# Snapshot release may only be updated for commits to the main branch.
if: github.ref == 'refs/heads/main'
uses: softprops/action-gh-release@v1
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
with:
name: Snapshot
prerelease: true

@@ -9,20 +9,24 @@ on:
jobs:
goreleaser:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
outputs:
artifacts: ${{ steps.releaser.outputs.artifacts }}
runs-on: ubuntu-latest
steps:
- name: Checkout repository and submodules
uses: actions/checkout@v4
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
with:
fetch-depth: 0
fetch-tags: true
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
with:
go-version: 1.23.4
go-version-file: go.mod
# The default cache key for this action considers only the `go.sum` file.
# We include .goreleaser.yaml here to differentiate from the cache used by the push action
@@ -33,7 +37,7 @@ jobs:
# Log into the GitHub Container Registry. The goreleaser action will create
# the docker images and push them to the GitHub Container Registry.
- uses: "docker/login-action@v3"
- uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: "ghcr.io"
username: "${{ github.actor }}"
@@ -42,11 +46,11 @@
# QEMU is required to build cross platform docker images using buildx.
# It allows virtualization of the CPU architecture at the application level.
- name: Set up QEMU dependency
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
- name: Run GoReleaser
id: releaser
uses: goreleaser/goreleaser-action@v6
uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
with:
version: ~> v2
args: release
@@ -54,8 +58,12 @@
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
create-setup-cli-release-pr:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
needs: goreleaser
runs-on: ubuntu-latest
steps:
- name: Set VERSION variable from tag
run: |
@@ -63,7 +71,7 @@
echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
- name: Update setup-cli
uses: actions/github-script@v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
script: |
@@ -78,8 +86,12 @@ jobs:
});
create-homebrew-tap-release-pr:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
needs: goreleaser
runs-on: ubuntu-latest
steps:
- name: Set VERSION variable from tag
run: |
@@ -87,7 +99,7 @@
echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
- name: Update homebrew-tap
uses: actions/github-script@v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
script: |
@@ -115,8 +127,12 @@ jobs:
});
create-vscode-extension-update-pr:
runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco
needs: goreleaser
runs-on: ubuntu-latest
steps:
- name: Set VERSION variable from tag
run: |
@@ -124,7 +140,7 @@
echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
- name: Update CLI version in the VSCode extension
uses: actions/github-script@v7
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
with:
github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
script: |

.gitignore
@@ -20,14 +20,12 @@ dist/
*.log
coverage.txt
coverage-acceptance.txt
__pycache__
*.pyc
.terraform
.terraform.lock.hcl
.vscode/launch.json
.vscode/tasks.json
.databricks
.ruff_cache

@@ -11,12 +11,24 @@ linters:
- gofmt
- gofumpt
- goimports
- testifylint
- intrange
- mirror
- perfsprint
- unconvert
linters-settings:
govet:
enable-all: true
disable:
- fieldalignment
- shadow
settings:
printf:
funcs:
- (github.com/databricks/cli/internal/testutil.TestingT).Infof
- (github.com/databricks/cli/internal/testutil.TestingT).Errorf
- (github.com/databricks/cli/internal/testutil.TestingT).Fatalf
- (github.com/databricks/cli/internal/testutil.TestingT).Skipf
gofmt:
rewrite-rules:
- pattern: 'a[b:len(a)]'
@@ -32,7 +44,14 @@ linters-settings:
gofumpt:
module-path: github.com/databricks/cli
extra-rules: true
#goimports:
# local-prefixes: github.com/databricks/cli
testifylint:
enable-all: true
disable:
# good check, but we have too many assert.(No)?Errorf? so excluding for now
- require-error
copyloopvar:
check-alias: true
issues:
exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
max-issues-per-linter: 1000
max-same-issues: 1000

@@ -1,5 +1,139 @@ # Version changelog
# Version changelog
## [Release] Release v0.241.2
This is a bugfix release to address an issue where jobs with tasks with a
libraries section with PyPI packages could not be deployed.
Bundles:
* Revert changes related to basename check for local libraries ([#2345](https://github.com/databricks/cli/pull/2345)).
## [Release] Release v0.241.1
Bundles:
* Fix for regression deploying resources with PyPi and Maven library types ([#2341](https://github.com/databricks/cli/pull/2341)).
## [Release] Release v0.241.0
Bundles:
* Added support to generate Git based jobs ([#2304](https://github.com/databricks/cli/pull/2304)).
* Added support for run_as in pipelines ([#2287](https://github.com/databricks/cli/pull/2287)).
* Raise an error when there are multiple local libraries with the same basename used ([#2297](https://github.com/databricks/cli/pull/2297)).
* Fix env variable for AzureCli local config ([#2248](https://github.com/databricks/cli/pull/2248)).
* Accept JSON files in includes section ([#2265](https://github.com/databricks/cli/pull/2265)).
* Always print warnings and errors; clean up format ([#2213](https://github.com/databricks/cli/pull/2213))
API Changes:
* Added `databricks account budget-policy` command group.
* Added `databricks lakeview-embedded` command group.
* Added `databricks query-execution` command group.
* Added `databricks account enable-ip-access-lists` command group.
* Added `databricks redash-config` command group.
OpenAPI commit c72c58f97b950fcb924a90ef164bcb10cfcd5ece (2025-02-03)
Dependency updates:
* Upgrade to TF provider 1.65.1 ([#2328](https://github.com/databricks/cli/pull/2328)).
* Bump github.com/hashicorp/terraform-exec from 0.21.0 to 0.22.0 ([#2237](https://github.com/databricks/cli/pull/2237)).
* Bump github.com/spf13/pflag from 1.0.5 to 1.0.6 ([#2281](https://github.com/databricks/cli/pull/2281)).
* Bump github.com/databricks/databricks-sdk-go from 0.56.1 to 0.57.0 ([#2321](https://github.com/databricks/cli/pull/2321)).
* Bump golang.org/x/oauth2 from 0.25.0 to 0.26.0 ([#2322](https://github.com/databricks/cli/pull/2322)).
* Bump golang.org/x/term from 0.28.0 to 0.29.0 ([#2325](https://github.com/databricks/cli/pull/2325)).
* Bump golang.org/x/text from 0.21.0 to 0.22.0 ([#2323](https://github.com/databricks/cli/pull/2323)).
* Bump golang.org/x/mod from 0.22.0 to 0.23.0 ([#2324](https://github.com/databricks/cli/pull/2324)).
## [Release] Release v0.240.0
Bundles:
* Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
* Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
* Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
* libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
API Changes:
* Added `databricks access-control` command group.
* Added `databricks serving-endpoints http-request` command.
* Changed `databricks serving-endpoints create` command with new required argument order.
* Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
* Changed `databricks recipients update` command return type to become non-empty.
OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
Dependency updates:
* Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
* Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
## [Release] Release v0.239.1
CLI:
* Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
* Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
Bundles:
* Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
* Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
* Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
* fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
* Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
* Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
* Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
* Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
* Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
* Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
## [Release] Release v0.239.0
### New feature announcement
#### Databricks Apps support
You can now manage Databricks Apps using DABs by defining an `app` resource in your bundle configuration.
For more information see Databricks documentation https://docs.databricks.com/en/dev-tools/bundles/resources.html#app
#### Referencing complex variables in complex variables
You can now reference complex variables within other complex variables.
For more details see https://github.com/databricks/cli/pull/2157
CLI:
* Filter out system clusters in cluster picker ([#2131](https://github.com/databricks/cli/pull/2131)).
* Add command line flags for fields that are not in the API request body ([#2155](https://github.com/databricks/cli/pull/2155)).
Bundles:
* Added support for Databricks Apps in DABs ([#1928](https://github.com/databricks/cli/pull/1928)).
* Allow artifact path to be located outside the sync root ([#2128](https://github.com/databricks/cli/pull/2128)).
* Retry app deployment if there is an active deployment in progress ([#2153](https://github.com/databricks/cli/pull/2153)).
* Resolve variables in a loop ([#2164](https://github.com/databricks/cli/pull/2164)).
* Improve resolution of complex variables within complex variables ([#2157](https://github.com/databricks/cli/pull/2157)).
* Added output message to warn about slower deployments with apps ([#2161](https://github.com/databricks/cli/pull/2161)).
* Patch references to UC schemas to capture dependencies automatically ([#1989](https://github.com/databricks/cli/pull/1989)).
* Format default-python template ([#2110](https://github.com/databricks/cli/pull/2110)).
* Encourage the use of root_path in production to ensure single deployment ([#1712](https://github.com/databricks/cli/pull/1712)).
* Log warnings to stderr for "bundle validate -o json" ([#2109](https://github.com/databricks/cli/pull/2109)).
API Changes:
* Changed `databricks account federation-policy update` command with new required argument order.
* Changed `databricks account service-principal-federation-policy update` command with new required argument order.
OpenAPI commit 779817ed8d63031f5ea761fbd25ee84f38feec0d (2025-01-08)
Dependency updates:
* Upgrade TF provider to 1.63.0 ([#2162](https://github.com/databricks/cli/pull/2162)).
* Bump golangci-lint version to v1.63.4 from v1.63.1 ([#2114](https://github.com/databricks/cli/pull/2114)).
* Bump astral-sh/setup-uv from 4 to 5 ([#2116](https://github.com/databricks/cli/pull/2116)).
* Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 ([#2080](https://github.com/databricks/cli/pull/2080)).
* Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 ([#2079](https://github.com/databricks/cli/pull/2079)).
* Bump golang.org/x/term from 0.27.0 to 0.28.0 ([#2078](https://github.com/databricks/cli/pull/2078)).
* Bump github.com/databricks/databricks-sdk-go from 0.54.0 to 0.55.0 ([#2126](https://github.com/databricks/cli/pull/2126)).
## [Release] Release v0.238.0
Bundles:
* Fix finding Python within virtualenv on Windows ([#2034](https://github.com/databricks/cli/pull/2034)).
* Include missing field descriptions in JSON schema ([#2045](https://github.com/databricks/cli/pull/2045)).
* Add validation for volume referenced from `artifact_path` ([#2050](https://github.com/databricks/cli/pull/2050)).
* Handle `${workspace.file_path}` references in source-linked deployments ([#2046](https://github.com/databricks/cli/pull/2046)).
* Set the write bit for files written during template initialization ([#2068](https://github.com/databricks/cli/pull/2068)).
## [Release] Release v0.237.0
Bundles:

@@ -1,45 +1,66 @@ default: build
default: build
default: vendor fmt lint tidy
lint: vendor
@echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..."
@golangci-lint run --fix ./...
PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .
lintcheck: vendor
@echo "✓ Linting source code with https://golangci-lint.run/ ..."
@golangci-lint run ./...
GOTESTSUM_FORMAT ?= pkgname-and-test-fails
GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped
test: lint testonly
testonly:
@echo "✓ Running tests ..."
@gotestsum --format pkgname-and-test-fails --no-summary=skipped --raw-command go test -v -json -short -coverprofile=coverage.txt ./...
lint:
golangci-lint run --fix
coverage: test
@echo "✓ Opening coverage for unit tests ..."
@go tool cover -html=coverage.txt
tidy:
@# not part of golangci-lint, apparently
go mod tidy
lintcheck:
golangci-lint run ./...
# Note 'make lint' will do formatting as well. However, if there are compilation errors,
# formatting/goimports will not be applied by 'make lint'; it will still be applied by 'make fmt'.
# If you need to ensure that formatting & imports are always fixed, do "make fmt lint".
fmt:
ruff format -q
golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...
test:
${GOTESTSUM_CMD} -- ${PACKAGES}
cover:
rm -fr ./acceptance/build/cover/
VERBOSE_TEST=1 CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
rm -fr ./acceptance/build/cover-merged/
mkdir -p acceptance/build/cover-merged/
go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/
go tool covdata textfmt -i acceptance/build/cover-merged -o coverage-acceptance.txt
showcover:
go tool cover -html=coverage.txt
acc-showcover:
go tool cover -html=coverage-acceptance.txt
build: vendor
@echo "✓ Building source code with go build ..."
@go build -mod vendor
go build -mod vendor
snapshot:
@echo "✓ Building dev snapshot"
@go build -o .databricks/databricks
go build -o .databricks/databricks
vendor:
@echo "✓ Filling vendor folder with library code ..."
@go mod vendor
go mod vendor
schema:
@echo "✓ Generating json-schema ..."
@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
docs:
go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
integration:
INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./acceptance ./integration/..." -- -parallel 4 -timeout=2h
integration: vendor
$(INTEGRATION)
integration-short:
$(INTEGRATION) -short
integration-short: vendor
VERBOSE_TEST=1 $(INTEGRATION) -short
.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

NOTICE
@@ -105,3 +105,16 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
https://github.com/hexops/gotextdiff
Copyright (c) 2009 The Go Authors. All rights reserved.
License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
https://github.com/BurntSushi/toml
Copyright (c) 2013 TOML authors
https://github.com/BurntSushi/toml/blob/master/COPYING
dario.cat/mergo
Copyright (c) 2013 Dario Castañé. All rights reserved.
Copyright (c) 2012 The Go Authors. All rights reserved.
https://github.com/darccio/mergo/blob/master/LICENSE
https://github.com/gorilla/mux
Copyright (c) 2023 The Gorilla Authors. All rights reserved.
https://github.com/gorilla/mux/blob/main/LICENSE

acceptance/.gitignore
@@ -0,0 +1 @@
build

acceptance/README.md
@@ -0,0 +1,21 @@
Acceptance tests are blackbox tests that are run against the compiled binary.
Currently these tests are run against a "fake" HTTP server pretending to be the Databricks API. However, they will be extended to run against a real environment as regular integration tests.
To author a test:
- Add a new directory under `acceptance`. Any level of nesting is supported.
- Add `databricks.yml` there.
- Add `script` with commands to run, e.g. `$CLI bundle validate`. The test case is recognized by presence of `script`.
The test runner will run the script, capture its output, and compare it with the `output.txt` file in the same directory.
In order to write `output.txt` for the first time, or to overwrite it with the current output, pass the -update flag to go test.
The scripts are run with `bash -e` so any errors are propagated. They are captured in `output.txt` by appending an `Exit code: N` line at the end.
For more complex tests one can also use:
- `errcode` helper: if the command fails with a non-zero code, it appends `Exit code: N` to the output but returns success to the caller (bash), allowing the script to continue.
- `trace` helper: prints the arguments before executing the command.
- custom output files: redirect output to a custom file (its name must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
See [selftest](./selftest) for a toy test.
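To keep `output.txt` stable across machines and runs, the runner rewrites environment-specific values (paths, tokens, versions) to placeholders before comparison; the mechanics live in libs/testdiff and appear in the runner below. A minimal sketch of that replacement step, with made-up example values:

package main

import (
	"fmt"
	"regexp"
)

// replacement mirrors the shape of testdiff.Replacement used by the runner below.
type replacement struct {
	pattern     *regexp.Regexp
	placeholder string
}

func main() {
	repls := []replacement{
		// Tokens and temp paths differ on every run, so they are rewritten
		// to stable placeholders before comparing against output.txt.
		{regexp.MustCompile(`dbapi[0-9a-f]+`), "[DATABRICKS_TOKEN]"},
		{regexp.MustCompile(regexp.QuoteMeta("/tmp/acceptance/test123")), "[TMPDIR]"},
	}
	out := "dir=/tmp/acceptance/test123 token=dbapi00112233\n"
	for _, r := range repls {
		out = r.pattern.ReplaceAllString(out, r.placeholder)
	}
	fmt.Print(out) // dir=[TMPDIR] token=[DATABRICKS_TOKEN]
}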

@@ -0,0 +1,672 @@
package acceptance_test
import (
"context"
"encoding/json"
"errors"
"flag"
"fmt"
"io"
"os"
"os/exec"
"path/filepath"
"regexp"
"runtime"
"slices"
"sort"
"strings"
"testing"
"time"
"unicode/utf8"
"github.com/google/uuid"
"github.com/databricks/cli/internal/testutil"
"github.com/databricks/cli/libs/env"
"github.com/databricks/cli/libs/testdiff"
"github.com/databricks/cli/libs/testserver"
"github.com/databricks/databricks-sdk-go"
"github.com/databricks/databricks-sdk-go/service/iam"
"github.com/stretchr/testify/require"
)
var (
KeepTmp bool
NoRepl bool
VerboseTest bool = os.Getenv("VERBOSE_TEST") != ""
)
// In order to debug the CLI running under an acceptance test, set this to the full subtest name, e.g. "bundle/variables/empty".
// Then set your breakpoints and click "debug test" near TestAccept in VS Code.
// example: var SingleTest = "bundle/variables/empty"
var SingleTest = ""
// If enabled, instead of compiling and running the CLI externally, we'll start an in-process server that accepts and runs
// CLI commands. The $CLI in test scripts is a helper that just forwards command-line arguments to this server (see bin/callserver.py).
// Also disables parallelism in tests.
var InprocessMode bool
func init() {
flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
flag.BoolVar(&NoRepl, "norepl", false, "Do not apply any replacements (for debugging)")
}
const (
EntryPointScript = "script"
CleanupScript = "script.cleanup"
PrepareScript = "script.prepare"
MaxFileSize = 100_000
// Filename to save replacements to (used by diff.py)
ReplsFile = "repls.json"
)
var Scripts = map[string]bool{
EntryPointScript: true,
CleanupScript: true,
PrepareScript: true,
}
var Ignored = map[string]bool{
ReplsFile: true,
}
func TestAccept(t *testing.T) {
testAccept(t, InprocessMode, SingleTest)
}
func TestInprocessMode(t *testing.T) {
if InprocessMode {
t.Skip("Already tested by TestAccept")
}
require.Equal(t, 1, testAccept(t, true, "selftest/basic"))
require.Equal(t, 1, testAccept(t, true, "selftest/server"))
}
func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
repls := testdiff.ReplacementsContext{}
cwd, err := os.Getwd()
require.NoError(t, err)
buildDir := filepath.Join(cwd, "build", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
// Download terraform and provider and create config; this also creates build directory.
RunCommand(t, []string{"python3", filepath.Join(cwd, "install_terraform.py"), "--targetdir", buildDir}, ".")
coverDir := os.Getenv("CLI_GOCOVERDIR")
if coverDir != "" {
require.NoError(t, os.MkdirAll(coverDir, os.ModePerm))
coverDir, err = filepath.Abs(coverDir)
require.NoError(t, err)
t.Logf("Writing coverage to %s", coverDir)
}
execPath := ""
if InprocessMode {
cmdServer := StartCmdServer(t)
t.Setenv("CMD_SERVER_URL", cmdServer.URL)
execPath = filepath.Join(cwd, "bin", "callserver.py")
} else {
execPath = BuildCLI(t, buildDir, coverDir)
}
t.Setenv("CLI", execPath)
repls.SetPath(execPath, "[CLI]")
// Make helper scripts available
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
tempHomeDir := t.TempDir()
repls.SetPath(tempHomeDir, "[TMPHOME]")
t.Logf("$TMPHOME=%v", tempHomeDir)
// Make use of the uv cache; since we set HomeEnvVar to a temporary directory, it is not picked up automatically
uvCache := getUVDefaultCacheDir(t)
t.Setenv("UV_CACHE_DIR", uvCache)
cloudEnv := os.Getenv("CLOUD_ENV")
if cloudEnv == "" {
defaultServer := testserver.New(t)
AddHandlers(defaultServer)
t.Setenv("DATABRICKS_DEFAULT_HOST", defaultServer.URL)
homeDir := t.TempDir()
// Do not read user's ~/.databrickscfg
t.Setenv(env.HomeEnvVar(), homeDir)
}
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
terraformExecPath := filepath.Join(buildDir, "terraform")
if runtime.GOOS == "windows" {
terraformExecPath += ".exe"
}
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
t.Setenv("TERRAFORM", terraformExecPath)
repls.SetPath(terraformExecPath, "[TERRAFORM]")
// do it last so that full paths match first:
repls.SetPath(buildDir, "[BUILD_DIR]")
testdiff.PrepareReplacementsDevVersion(t, &repls)
testdiff.PrepareReplacementSdkVersion(t, &repls)
testdiff.PrepareReplacementsGoVersion(t, &repls)
repls.SetPath(cwd, "[TESTROOT]")
repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})
testDirs := getTests(t)
require.NotEmpty(t, testDirs)
if singleTest != "" {
testDirs = slices.DeleteFunc(testDirs, func(n string) bool {
return n != singleTest
})
require.NotEmpty(t, testDirs, "singleTest=%#v did not match any tests\n%#v", singleTest, testDirs)
}
for _, dir := range testDirs {
t.Run(dir, func(t *testing.T) {
if !InprocessMode {
t.Parallel()
}
runTest(t, dir, coverDir, repls.Clone())
})
}
return len(testDirs)
}
func getTests(t *testing.T) []string {
testDirs := make([]string, 0, 128)
err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
name := filepath.Base(path)
if name == EntryPointScript {
// Presence of 'script' marks a test case in this directory
testName := filepath.ToSlash(filepath.Dir(path))
testDirs = append(testDirs, testName)
}
return nil
})
require.NoError(t, err)
sort.Strings(testDirs)
return testDirs
}
func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
config, configPath := LoadConfig(t, dir)
isEnabled, isPresent := config.GOOS[runtime.GOOS]
if isPresent && !isEnabled {
t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
}
cloudEnv := os.Getenv("CLOUD_ENV")
if config.LocalOnly && cloudEnv != "" {
t.Skipf("Disabled via LocalOnly setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
}
var tmpDir string
var err error
if KeepTmp {
tempDirBase := filepath.Join(os.TempDir(), "acceptance")
_ = os.Mkdir(tempDirBase, 0o755)
tmpDir, err = os.MkdirTemp(tempDirBase, "")
require.NoError(t, err)
t.Logf("Created directory: %s", tmpDir)
} else {
tmpDir = t.TempDir()
}
repls.SetPathWithParents(tmpDir, "[TMPDIR]")
scriptContents := readMergedScriptContents(t, dir)
testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
inputs := make(map[string]bool, 2)
outputs := make(map[string]bool, 2)
err = CopyDir(dir, tmpDir, inputs, outputs)
require.NoError(t, err)
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
cmd := exec.Command(args[0], args[1:]...)
cmd.Env = os.Environ()
var workspaceClient *databricks.WorkspaceClient
var user iam.User
// Start a new server with a custom configuration if the acceptance test
// specifies custom server stubs.
var server *testserver.Server
if cloudEnv == "" {
// Start a new server for this test if either:
// 1. A custom server spec is defined in the test configuration.
// 2. The test is configured to record requests and assert on them. We need
// a duplicate of the default server to record requests because the default
// server otherwise is a shared resource.
databricksLocalHost := os.Getenv("DATABRICKS_DEFAULT_HOST")
if len(config.Server) > 0 || config.RecordRequests {
server = testserver.New(t)
server.RecordRequests = config.RecordRequests
server.IncludeRequestHeaders = config.IncludeRequestHeaders
// We want later stubs to take precedence, so that leaf configs take precedence over parent directory configs
// In gorilla/mux earlier handlers take precedence, so we need to reverse the order
slices.Reverse(config.Server)
for _, stub := range config.Server {
require.NotEmpty(t, stub.Pattern)
items := strings.Split(stub.Pattern, " ")
require.Len(t, items, 2)
server.Handle(items[0], items[1], func(req testserver.Request) any {
return stub.Response
})
}
// The earliest handlers take precedence, add default handlers last
AddHandlers(server)
databricksLocalHost = server.URL
}
// Each local test should use a new token that will result in a new fake workspace,
// so that tests don't interfere with each other.
tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
config := databricks.Config{
Host: databricksLocalHost,
Token: "dbapi" + tokenSuffix,
}
workspaceClient, err = databricks.NewWorkspaceClient(&config)
require.NoError(t, err)
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+config.Host)
cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+config.Token)
// For the purposes of replacements, use testUser.
// Note, users might have overridden /api/2.0/preview/scim/v2/Me but that should not affect the replacement:
user = testUser
} else {
// Use whatever authentication mechanism is configured by the test runner.
workspaceClient, err = databricks.NewWorkspaceClient(&databricks.Config{})
require.NoError(t, err)
pUser, err := workspaceClient.CurrentUser.Me(context.Background())
require.NoError(t, err, "Failed to get current user")
user = *pUser
}
testdiff.PrepareReplacementsUser(t, &repls, user)
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
// Must be added after PrepareReplacementsUser, otherwise it conflicts with [USERNAME]
testdiff.PrepareReplacementsUUID(t, &repls)
// User replacements come last:
repls.Repls = append(repls.Repls, config.Repls...)
// Save replacements to temp test directory so that it can be read by diff.py
replsJson, err := json.MarshalIndent(repls.Repls, "", " ")
require.NoError(t, err)
testutil.WriteFile(t, filepath.Join(tmpDir, ReplsFile), string(replsJson))
if coverDir != "" {
// Creating individual coverage directory for each test, because writing to the same one
// results in sporadic failures like this one (only if tests are running in parallel):
// +error: coverage meta-data emit failed: writing ... rename .../tmp.covmeta.b3f... .../covmeta.b3f2c...: no such file or directory
coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
err := os.MkdirAll(coverDir, os.ModePerm)
require.NoError(t, err)
cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
}
absDir, err := filepath.Abs(dir)
require.NoError(t, err)
cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
// Write combined output to a file
out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
require.NoError(t, err)
cmd.Stdout = out
cmd.Stderr = out
cmd.Dir = tmpDir
err = cmd.Run()
// Write the requests made to the server to a output file if the test is
// configured to record requests.
if config.RecordRequests {
f, err := os.OpenFile(filepath.Join(tmpDir, "out.requests.txt"), os.O_CREATE|os.O_WRONLY, 0o644)
require.NoError(t, err)
for _, req := range server.Requests {
reqJson, err := json.MarshalIndent(req, "", " ")
require.NoErrorf(t, err, "Failed to indent: %#v", req)
reqJsonWithRepls := repls.Replace(string(reqJson))
_, err = f.WriteString(reqJsonWithRepls + "\n")
require.NoError(t, err)
}
err = f.Close()
require.NoError(t, err)
}
// Include exit code in output (if non-zero)
formatOutput(out, err)
require.NoError(t, out.Close())
printedRepls := false
// Compare expected outputs
for relPath := range outputs {
doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
}
// Make sure there are no unaccounted-for new files
files := ListDir(t, tmpDir)
unexpected := []string{}
for _, relPath := range files {
if _, ok := inputs[relPath]; ok {
continue
}
if _, ok := outputs[relPath]; ok {
continue
}
if _, ok := Ignored[relPath]; ok {
continue
}
unexpected = append(unexpected, relPath)
if strings.HasPrefix(relPath, "out") {
// We have a new file starting with "out"
// Show the contents & support overwrite mode for it:
doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
}
}
if len(unexpected) > 0 {
t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
}
}
func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
pathRef := filepath.Join(dirRef, relPath)
pathNew := filepath.Join(dirNew, relPath)
bufRef, okRef := tryReading(t, pathRef)
bufNew, okNew := tryReading(t, pathNew)
if !okRef && !okNew {
t.Errorf("Both files are missing or have errors: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
return
}
valueRef := testdiff.NormalizeNewlines(bufRef)
valueNew := testdiff.NormalizeNewlines(bufNew)
// Apply replacements to the new value only.
// The reference value is stored after applying replacements.
if !NoRepl {
valueNew = repls.Replace(valueNew)
}
// The test did not produce an expected output file.
if okRef && !okNew {
t.Errorf("Missing output file: %s", relPath)
if testdiff.OverwriteMode {
t.Logf("Removing output file: %s", relPath)
require.NoError(t, os.Remove(pathRef))
}
return
}
// The test produced an unexpected output file.
if !okRef && okNew {
t.Errorf("Unexpected output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
if testdiff.OverwriteMode {
t.Logf("Writing output file: %s", relPath)
testutil.WriteFile(t, pathRef, valueNew)
}
return
}
// Compare the reference and new values.
equal := testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
if !equal && testdiff.OverwriteMode {
t.Logf("Overwriting existing output file: %s", relPath)
testutil.WriteFile(t, pathRef, valueNew)
}
if VerboseTest && !equal && printedRepls != nil && !*printedRepls {
*printedRepls = true
var items []string
for _, item := range repls.Repls {
items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
}
t.Log("Available replacements:\n" + strings.Join(items, "\n"))
}
}
// Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
// Note, cleanups are not executed if the main script fails; that's not a huge issue, since it runs in a temp dir.
func readMergedScriptContents(t *testing.T, dir string) string {
scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))
// Wrap script contents in a subshell such that changing the working
// directory only affects the main script and not cleanup.
scriptContents = "(\n" + scriptContents + ")\n"
prepares := []string{}
cleanups := []string{}
for {
x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
if ok {
cleanups = append(cleanups, x)
}
x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
if ok {
prepares = append(prepares, x)
}
if dir == "" || dir == "." {
break
}
dir = filepath.Dir(dir)
require.True(t, filepath.IsLocal(dir))
}
slices.Reverse(prepares)
prepares = append(prepares, scriptContents)
prepares = append(prepares, cleanups...)
return strings.Join(prepares, "\n")
}
func BuildCLI(t *testing.T, buildDir, coverDir string) string {
execPath := filepath.Join(buildDir, "databricks")
if runtime.GOOS == "windows" {
execPath += ".exe"
}
args := []string{
"go", "build",
"-mod", "vendor",
"-o", execPath,
}
if coverDir != "" {
args = append(args, "-cover")
}
if runtime.GOOS == "windows" {
// Get this error on my local Windows:
// error obtaining VCS status: exit status 128
// Use -buildvcs=false to disable VCS stamping.
args = append(args, "-buildvcs=false")
}
RunCommand(t, args, "..")
return execPath
}
func copyFile(src, dst string) error {
in, err := os.Open(src)
if err != nil {
return err
}
defer in.Close()
out, err := os.Create(dst)
if err != nil {
return err
}
defer out.Close()
_, err = io.Copy(out, in)
return err
}
func formatOutput(w io.Writer, err error) {
if err == nil {
return
}
if exiterr, ok := err.(*exec.ExitError); ok {
exitCode := exiterr.ExitCode()
fmt.Fprintf(w, "\nExit code: %d\n", exitCode)
} else {
fmt.Fprintf(w, "\nError: %s\n", err)
}
}
func tryReading(t *testing.T, path string) (string, bool) {
info, err := os.Stat(path)
if err != nil {
if !errors.Is(err, os.ErrNotExist) {
t.Errorf("%s: %s", path, err)
}
return "", false
}
if info.Size() > MaxFileSize {
t.Errorf("%s: ignoring, too large: %d", path, info.Size())
return "", false
}
data, err := os.ReadFile(path)
if err != nil {
// already checked ErrNotExist above
t.Errorf("%s: %s", path, err)
return "", false
}
if !utf8.Valid(data) {
t.Errorf("%s: not valid utf-8", path)
return "", false
}
return string(data), true
}
func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
return filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
name := info.Name()
relPath, err := filepath.Rel(src, path)
if err != nil {
return err
}
if strings.HasPrefix(relPath, "out") {
if !info.IsDir() {
outputs[relPath] = true
}
return nil
} else {
inputs[relPath] = true
}
if _, ok := Scripts[name]; ok {
return nil
}
destPath := filepath.Join(dst, relPath)
if info.IsDir() {
return os.MkdirAll(destPath, info.Mode())
}
return copyFile(path, destPath)
})
}
func ListDir(t *testing.T, src string) []string {
var files []string
err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
if err != nil {
// Do not FailNow here.
// The output comparison happens after this call and includes output.txt, which
// contains errors printed by commands, including the explanation of why a given file cannot be read.
t.Errorf("Error when listing %s: path=%s: %s", src, path, err)
return nil
}
if info.IsDir() {
return nil
}
relPath, err := filepath.Rel(src, path)
if err != nil {
return err
}
files = append(files, relPath)
return nil
})
if err != nil {
t.Errorf("Failed to list %s: %s", src, err)
}
return files
}
func getUVDefaultCacheDir(t *testing.T) string {
// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
// the default cache directory is
// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
cacheDir, err := os.UserCacheDir()
require.NoError(t, err)
if runtime.GOOS == "windows" {
return cacheDir + "\\uv\\cache"
} else {
return cacheDir + "/uv"
}
}
func RunCommand(t *testing.T, args []string, dir string) {
start := time.Now()
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = dir
out, err := cmd.CombinedOutput()
elapsed := time.Since(start)
t.Logf("%s took %s", args, elapsed)
require.NoError(t, err, "%s failed: %s\n%s", args, err, out)
if len(out) > 0 {
t.Logf("%s output: %s", args, out)
}
}

@@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST
[profile_name]
host = https://test@non-existing-subdomain.databricks.com

@@ -0,0 +1,14 @@
bundle:
name: test-auth
workspace:
host: $DATABRICKS_HOST
targets:
dev:
default: true
workspace:
host: $DATABRICKS_HOST
prod:
workspace:
host: https://bar.com

View File

@ -0,0 +1,32 @@
=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"
=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"
=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Exit code: 1
=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Exit code: 1
=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"

View File

@ -0,0 +1,30 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg
host=$DATABRICKS_HOST
unset DATABRICKS_HOST
title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName
title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
trace errcode $CLI current-user me -p profile_name | jq .userName
title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name
cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName

View File

@ -0,0 +1,12 @@
Badness = "When -p flag is used inside the bundle folder for any CLI commands, CLI use bundle host anyway instead of profile one"
# Some of the clouds have DATABRICKS_HOST variable setup without https:// prefix
# In the result, output is replaced with DATABRICKS_URL variable instead of DATABRICKS_HOST
# This is a workaround to replace DATABRICKS_URL with DATABRICKS_HOST
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_TARGET'
[[Repls]]
Old='DATABRICKS_URL'
New='DATABRICKS_TARGET'

View File

@ -0,0 +1,12 @@
{
"headers": {
"Authorization": [
"Basic [ENCODED_AUTH]"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/basic"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""
export DATABRICKS_USERNAME=username
export DATABRICKS_PASSWORD=password
$CLI current-user me

View File

@ -0,0 +1,4 @@
# "username:password" in base64 is dXNlcm5hbWU6cGFzc3dvcmQ=, expect to see this in Authorization header
[[Repls]]
Old = "dXNlcm5hbWU6cGFzc3dvcmQ="
New = "[ENCODED_AUTH]"

View File

@ -0,0 +1,34 @@
{
"headers": {
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
]
},
"method": "GET",
"path": "/oidc/.well-known/oauth-authorization-server"
}
{
"headers": {
"Authorization": [
"Basic [ENCODED_AUTH]"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
]
},
"method": "POST",
"path": "/oidc/v1/token",
"raw_body": "grant_type=client_credentials\u0026scope=all-apis"
}
{
"headers": {
"Authorization": [
"Bearer oauth-token"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/oauth-m2m"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""
export DATABRICKS_CLIENT_ID=client_id
export DATABRICKS_CLIENT_SECRET=client_secret
$CLI current-user me

View File

@ -0,0 +1,5 @@
# "client_id:client_secret" in base64 is Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=, expect to
# see this in Authorization header
[[Repls]]
Old = "Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="
New = "[ENCODED_AUTH]"

View File

@ -0,0 +1,12 @@
{
"headers": {
"Authorization": [
"Bearer dapi1234"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/pat"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,3 @@
export DATABRICKS_TOKEN=dapi1234
$CLI current-user me

View File

@ -0,0 +1,20 @@
LocalOnly = true
RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]
[[Repls]]
Old = '(linux|darwin|windows)'
New = '[OS]'
[[Repls]]
Old = " upstream/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " upstream-version/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " cicd/[A-Za-z0-9.-]+"
New = ""

31
acceptance/bin/callserver.py Executable file
View File

@ -0,0 +1,31 @@
#!/usr/bin/env python3
"""Forward argv, cwd and env to the harness's command server and replay its stdout, stderr and exit code."""
import sys
import os
import json
import urllib.request
from urllib.parse import urlencode
env = {}
for key, value in os.environ.items():
if len(value) > 10_000:
sys.stderr.write(f"Dropping key={key} value len={len(value)}\n")
continue
env[key] = value
q = {
"args": " ".join(sys.argv[1:]),
"cwd": os.getcwd(),
"env": json.dumps(env),
}
url = os.environ["CMD_SERVER_URL"] + "/?" + urlencode(q)
if len(url) > 100_000:
sys.exit("url too large")
resp = urllib.request.urlopen(url)
assert resp.status == 200, (resp.status, resp.url, resp.headers)
result = json.load(resp)
sys.stderr.write(result["stderr"])
sys.stdout.write(result["stdout"])
exitcode = int(result["exitcode"])
sys.exit(exitcode)
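For context, a minimal sketch of the command server this client expects on CMD_SERVER_URL, inferred from the request and response shapes above (the real server lives in the test harness; names here are assumptions):
#!/usr/bin/env python3
# Hypothetical counterpart to callserver.py: run one command per GET request
# and return its stdout/stderr/exitcode as JSON.
import json
import subprocess
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, parse_qs

class Handler(BaseHTTPRequestHandler):
    def do_GET(self):
        q = parse_qs(urlparse(self.path).query)
        args = q["args"][0].split()
        proc = subprocess.run(
            args,
            cwd=q["cwd"][0],
            env=json.loads(q["env"][0]),
            capture_output=True,
            text=True,
        )
        body = json.dumps({
            "stdout": proc.stdout,
            "stderr": proc.stderr,
            "exitcode": proc.returncode,
        }).encode()
        self.send_response(200)
        self.send_header("Content-Type", "application/json")
        self.send_header("Content-Length", str(len(body)))
        self.end_headers()
        self.wfile.write(body)

if __name__ == "__main__":
    HTTPServer(("127.0.0.1", 8080), Handler).serve_forever()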

56
acceptance/bin/diff.py Executable file
View File

@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""This script implements "diff -r -U2 dir1 dir2" but applies replacements first"""
import sys
import difflib
import json
import re
from pathlib import Path
def replaceAll(patterns, s):
for comp, new in patterns:
s = comp.sub(new, s)
return s
def main():
d1, d2 = sys.argv[1:]
d1, d2 = Path(d1), Path(d2)
with open("repls.json") as f:
repls = json.load(f)
patterns = []
for r in repls:
try:
c = re.compile(r["Old"])
patterns.append((c, r["New"]))
except re.error as e:
print(f"Regex error for pattern {r}: {e}", file=sys.stderr)
files1 = [str(p.relative_to(d1)) for p in d1.rglob("*") if p.is_file()]
files2 = [str(p.relative_to(d2)) for p in d2.rglob("*") if p.is_file()]
set1 = set(files1)
set2 = set(files2)
for f in sorted(set1 | set2):
p1 = d1 / f
p2 = d2 / f
if f not in set2:
print(f"Only in {d1}: {f}")
elif f not in set1:
print(f"Only in {d2}: {f}")
else:
a = [replaceAll(patterns, x) for x in p1.read_text().splitlines(True)]
b = [replaceAll(patterns, x) for x in p2.read_text().splitlines(True)]
if a != b:
p1_str = p1.as_posix()
p2_str = p2.as_posix()
for line in difflib.unified_diff(a, b, p1_str, p2_str, "", "", 2):
print(line, end="")
if __name__ == "__main__":
main()
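The repls.json the script reads is a JSON array of {"Old", "New"} objects mirroring the [[Repls]] TOML entries; a minimal self-test of the same replacement logic (assumed file shape, standalone):
#!/usr/bin/env python3
# Standalone check of the substitution applied by diff.py (same shape as repls.json).
import json
import re

repls = json.loads('[{"Old": "pid=[0-9]+", "New": "pid=12345"}]')
patterns = [(re.compile(r["Old"]), r["New"]) for r in repls]

def replace_all(s):
    for comp, new in patterns:
        s = comp.sub(new, s)
    return s

assert replace_all("pid=98765 done") == "pid=12345 done"
print("ok")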

22
acceptance/bin/sort_blocks.py Executable file
View File

@ -0,0 +1,22 @@
#!/usr/bin/env python3
"""
Helper to sort blocks in a text file. A block is a set of lines separated from others by an empty line.
This works around non-determinism in the output.
"""
import sys
blocks = []
for line in sys.stdin:
if not line.strip():
if blocks and blocks[-1]:
blocks.append("")
continue
if not blocks:
blocks.append("")
blocks[-1] += line
blocks.sort()
print("\n".join(blocks))

10
acceptance/bin/sort_lines.py Executable file
View File

@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in a text file. Similar to 'sort' but with no dependence on the locale or on the presence of 'sort' in PATH.
"""
import sys
lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))

View File

@ -0,0 +1,2 @@
bundle:
name: debug

View File

@ -0,0 +1,15 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true

View File

@ -0,0 +1,93 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Debug: Apply pid=12345 mutator=load
10:07:59 Info: Phase: load pid=12345 mutator=load
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Debug: Apply pid=12345 mutator=initialize
10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
< "id": "[USERID]",
< "userName": "[USERNAME]"
< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
< {
< "message": "Workspace path not found"
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
< "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
10:07:59 Info: completed execution pid=12345 exit_code=0

View File

@ -0,0 +1,7 @@
Name: debug
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/debug/default
Validation OK!

View File

@ -0,0 +1,4 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt

View File

@ -0,0 +1,18 @@
LocalOnly = true
[[Repls]]
# The keys are unsorted and also vary per OS
Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '
New = 'Environment variables for Terraform: ...redacted... '
[[Repls]]
Old = 'pid=[0-9]+'
New = 'pid=12345'
[[Repls]]
Old = '\d\d:\d\d:\d\d'
New = '10:07:59'
[[Repls]]
Old = '\\'
New = '/'

View File

@ -0,0 +1,2 @@
bundle:
name: git_job

View File

@ -0,0 +1,17 @@
resources:
jobs:
out:
name: gitjob
tasks:
- task_key: test_task
notebook_task:
notebook_path: some/test/notebook.py
- task_key: test_task_2
notebook_task:
notebook_path: /Workspace/Users/foo@bar.com/some/test/notebook.py
source: WORKSPACE
git_source:
git_branch: main
git_commit: abcdef
git_provider: github
git_url: https://git.databricks.com

View File

@ -0,0 +1,2 @@
Job is using Git source, skipping downloading files
Job configuration successfully saved to out.job.yml

View File

@ -0,0 +1 @@
$CLI bundle generate job --existing-job-id 1234 --config-dir . --key out

View File

@ -0,0 +1,33 @@
LocalOnly = true # This test needs to run against the stubbed Databricks API
[[Server]]
Pattern = "GET /api/2.1/jobs/get"
Response.Body = '''
{
"job_id": 11223344,
"settings": {
"name": "gitjob",
"git_source": {
"git_url": "https://git.databricks.com",
"git_provider": "github",
"git_branch": "main",
"git_commit": "abcdef"
},
"tasks": [
{
"task_key": "test_task",
"notebook_task": {
"notebook_path": "some/test/notebook.py"
}
},
{
"task_key": "test_task_2",
"notebook_task": {
"source": "WORKSPACE",
"notebook_path": "/Workspace/Users/foo@bar.com/some/test/notebook.py"
}
}
]
}
}
'''

View File

@ -0,0 +1,2 @@
bundle:
name: git-permerror

View File

@ -0,0 +1,81 @@
=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git
>>> [CLI] bundle validate
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Name: git-permerror
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default
Found 1 error
Exit code: 1
>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git/HEAD
>>> [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
"bundle_root_path": "."
}
=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git/config
>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}

View File

@ -0,0 +1,26 @@
mkdir myrepo
cd myrepo
cp ../databricks.yml .
git-repo-init
mkdir -p subdir/a/b
printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
trace chmod 000 .git
errcode trace $CLI bundle validate
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
chmod 700 .git
trace chmod 000 .git/HEAD
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorretly set to true. bundle_root_path is not correct is subdir case.\n"
chmod 666 .git/HEAD
trace chmod 000 .git/config
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
cd ..
rm -fr myrepo

View File

@ -0,0 +1,5 @@
Badness = "inferred flag is set to true incorrect; bundle_root_path is not correct; Warn and Error talk about the same; Warn goes to stderr, Error goes to stdout (for backward compat); Warning about permissions repeated twice"
[GOOS]
# This test relies on chmod which does not work on Windows
windows = false

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle deploy --help
Deploy bundle
Usage:
databricks bundle deploy [flags]
Flags:
--auto-approve Skip interactive approvals that might be required for deployment.
-c, --cluster-id string Override cluster in the deployment with the given cluster ID.
--fail-on-active-runs Fail if there are running jobs or pipelines in the deployment.
--force Force-override Git branch validation.
--force-lock Force acquisition of deployment lock.
-h, --help help for deploy
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle deploy --help

View File

@ -0,0 +1,22 @@
>>> [CLI] bundle deployment --help
Deployment related commands
Usage:
databricks bundle deployment [command]
Available Commands:
bind Bind bundle-defined resources to existing resources
unbind Unbind bundle-defined resources from its managed remote resource
Flags:
-h, --help help for deployment
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Use "databricks bundle deployment [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle deployment --help

View File

@ -0,0 +1,18 @@
>>> [CLI] bundle destroy --help
Destroy deployed bundle resources
Usage:
databricks bundle destroy [flags]
Flags:
--auto-approve Skip interactive approvals for deleting resources and files
--force-lock Force acquisition of deployment lock.
-h, --help help for destroy
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle destroy --help

View File

@ -0,0 +1,24 @@
>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard
Usage:
databricks bundle generate dashboard [flags]
Flags:
-s, --dashboard-dir string directory to write the dashboard representation to (default "src")
--existing-id string ID of the dashboard to generate configuration for
--existing-path string workspace path of the dashboard to generate configuration for
-f, --force force overwrite existing files in the output directory
-h, --help help for dashboard
--resource string resource key of dashboard to watch for changes
-d, --resource-dir string directory to write the configuration to (default "resources")
--watch watch for changes to the dashboard and update the configuration
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate dashboard --help

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle generate job --help
Generate bundle configuration for a job
Usage:
databricks bundle generate job [flags]
Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-job-id int Job ID of the job to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for job
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate job --help

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline
Usage:
databricks bundle generate pipeline [flags]
Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-pipeline-id string ID of the pipeline to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for pipeline
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate pipeline --help

View File

@ -0,0 +1,25 @@
>>> [CLI] bundle generate --help
Generate bundle configuration
Usage:
databricks bundle generate [command]
Available Commands:
app Generate bundle configuration for a Databricks app
dashboard Generate configuration for a dashboard
job Generate bundle configuration for a job
pipeline Generate bundle configuration for a pipeline
Flags:
-h, --help help for generate
--key string resource key to use for the generated configuration
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Use "databricks bundle generate [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle generate --help

View File

@ -0,0 +1,31 @@
>>> [CLI] bundle init --help
Initialize using a bundle template.
TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:
- default-python: The default Python template for Notebooks / Delta Live Tables / Workflows
- default-sql: The default SQL template for .sql files that run with Databricks SQL
- dbt-sql: The dbt SQL template (databricks.com/blog/delivering-cost-effective-data-real-time-dbt-and-databricks)
- mlops-stacks: The Databricks MLOps Stacks template (github.com/databricks/mlops-stacks)
- a local file system path with a template directory
- a Git repository URL, e.g. https://github.com/my/repository
See https://docs.databricks.com/en/dev-tools/bundles/templates.html for more information on templates.
Usage:
databricks bundle init [TEMPLATE_PATH] [flags]
Flags:
--branch string Git branch to use for template initialization
--config-file string JSON file containing key value pairs of input parameters required for template initialization.
-h, --help help for init
--output-dir string Directory to write the initialized template to.
--tag string Git tag to use for template initialization
--template-dir string Directory path within a Git repository containing the template.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle init --help

View File

@ -0,0 +1,17 @@
>>> [CLI] bundle open --help
Open a resource in the browser
Usage:
databricks bundle open [flags]
Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for open
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle open --help

View File

@ -0,0 +1,57 @@
>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.
The KEY is the unique identifier of the resource to run. In addition to
customizing the run using any of the available flags, you can also specify
keyword or positional arguments as shown in these examples:
databricks bundle run my_job -- --key1 value1 --key2 value2
Or:
databricks bundle run my_job -- value1 value2 value3
If the specified job uses job parameters or the job has a notebook task with
parameters, the first example applies and flag names are mapped to the
parameter names.
If the specified job does not use job parameters and the job has a Python file
task or a Python wheel task, the second example applies.
Usage:
databricks bundle run [flags] KEY
Job Flags:
--params stringToString comma separated k=v pairs for job parameters (default [])
Job Task Flags:
Note: please prefer use of job-level parameters (--param) over task-level parameters.
For more information, see https://docs.databricks.com/en/workflows/jobs/create-run-jobs.html#pass-parameters-to-a-databricks-job-task
--dbt-commands strings A list of commands to execute for jobs with DBT tasks.
--jar-params strings A list of parameters for jobs with Spark JAR tasks.
--notebook-params stringToString A map from keys to values for jobs with notebook tasks. (default [])
--pipeline-params stringToString A map from keys to values for jobs with pipeline tasks. (default [])
--python-named-params stringToString A map from keys to values for jobs with Python wheel tasks. (default [])
--python-params strings A list of parameters for jobs with Python tasks.
--spark-submit-params strings A list of parameters for jobs with Spark submit tasks.
--sql-params stringToString A map from keys to values for jobs with SQL tasks. (default [])
Pipeline Flags:
--full-refresh strings List of tables to reset and recompute.
--full-refresh-all Perform a full graph reset and recompute.
--refresh strings List of tables to update.
--refresh-all Perform a full graph update.
--validate-only Perform an update to validate graph correctness.
Flags:
-h, --help help for run
--no-wait Don't wait for the run to complete.
--restart Restart the run if it is already running.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle run --help

View File

@ -0,0 +1,16 @@
>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration
Usage:
databricks bundle schema [flags]
Flags:
-h, --help help for schema
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle schema --help

View File

@ -0,0 +1,17 @@
>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle
Usage:
databricks bundle summary [flags]
Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for summary
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle summary --help

View File

@ -0,0 +1,19 @@
>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace
Usage:
databricks bundle sync [flags]
Flags:
--full perform full synchronization (default is incremental)
-h, --help help for sync
--interval duration file system polling interval (for --watch) (default 1s)
--output type type of the output format
--watch watch local file system for changes
Global Flags:
--debug enable debug logging
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle sync --help

View File

@ -0,0 +1,16 @@
>>> [CLI] bundle validate --help
Validate configuration
Usage:
databricks bundle validate [flags]
Flags:
-h, --help help for validate
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle validate --help

View File

@ -0,0 +1,33 @@
>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.
Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html
Usage:
databricks bundle [command]
Available Commands:
deploy Deploy bundle
deployment Deployment related commands
destroy Destroy deployed bundle resources
generate Generate bundle configuration
init Initialize using a bundle template
open Open a resource in the browser
run Run a job or pipeline update
schema Generate JSON Schema for bundle configuration
summary Summarize resources deployed by this bundle
sync Synchronize bundle tree to the workspace
validate Validate configuration
Flags:
-h, --help help for bundle
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks bundle [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle --help

View File

@ -0,0 +1,6 @@
bundle:
name: non_yaml_in_includes
include:
- test.py
- resources/*.yml

View File

@ -0,0 +1,10 @@
Error: Files in the 'include' configuration section must be YAML or JSON files.
in databricks.yml:5:4
The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
Name: non_yaml_in_includes
Found 1 error
Exit code: 1

View File

@ -0,0 +1 @@
$CLI bundle validate

View File

@ -0,0 +1 @@
print("Hello world")

View File

@ -1,9 +1,6 @@
bundle:
name: clusters
workspace:
host: https://acme.cloud.databricks.com/
resources:
clusters:
foo:

View File

@ -0,0 +1,33 @@
>>> [CLI] bundle validate -o json -t default
{
"autoscale": {
"max_workers": 7,
"min_workers": 2
},
"cluster_name": "foo",
"custom_tags": {},
"node_type_id": "i3.xlarge",
"num_workers": 2,
"spark_conf": {
"spark.executor.memory": "2g"
},
"spark_version": "13.3.x-scala2.12"
}
>>> [CLI] bundle validate -o json -t development
{
"autoscale": {
"max_workers": 3,
"min_workers": 1
},
"cluster_name": "foo-override",
"custom_tags": {},
"node_type_id": "m5.xlarge",
"num_workers": 3,
"spark_conf": {
"spark.executor.memory": "4g",
"spark.executor.memory2": "4g"
},
"spark_version": "15.2.x-scala2.12"
}

View File

@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t default | jq .resources.clusters.foo
trace $CLI bundle validate -o json -t development | jq .resources.clusters.foo

View File

@ -1,9 +1,6 @@
bundle:
name: override_job_cluster
workspace:
host: https://acme.cloud.databricks.com/
resources:
jobs:
foo:

View File

@ -0,0 +1,56 @@
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"node_type_id": "i3.xlarge",
"num_workers": 1,
"spark_version": "13.3.x-scala2.12"
}
}
],
"name": "job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {}
}
}
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"node_type_id": "i3.2xlarge",
"num_workers": 4,
"spark_version": "13.3.x-scala2.12"
}
}
],
"name": "job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {}
}
}

View File

@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t development | jq '.resources.jobs'
trace $CLI bundle validate -o json -t staging | jq '.resources.jobs'

View File

@ -0,0 +1,36 @@
bundle:
name: override_job_cluster
variables:
mykey:
default: key
resources:
jobs:
foo:
name: job
job_clusters:
- job_cluster_key: key
new_cluster:
spark_version: 13.3.x-scala2.12
targets:
development:
resources:
jobs:
foo:
job_clusters:
- job_cluster_key: "${var.mykey}"
new_cluster:
node_type_id: i3.xlarge
num_workers: 1
staging:
resources:
jobs:
foo:
job_clusters:
- job_cluster_key: "${var.mykey}"
new_cluster:
node_type_id: i3.2xlarge
num_workers: 4

View File

@ -0,0 +1,74 @@
>>> [CLI] bundle validate -o json -t development
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"node_type_id": "i3.xlarge",
"num_workers": 1,
"spark_version": "13.3.x-scala2.12"
}
}
],
"name": "job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {}
}
}
>>> [CLI] bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development
Validation OK!
>>> [CLI] bundle validate -o json -t staging
{
"foo": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "key",
"new_cluster": {
"node_type_id": "i3.2xlarge",
"num_workers": 4,
"spark_version": "13.3.x-scala2.12"
}
}
],
"name": "job",
"permissions": [],
"queue": {
"enabled": true
},
"tags": {}
}
}
>>> [CLI] bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging
Validation OK!

Some files were not shown because too many files have changed in this diff.