Merge branch 'main' into dependabot/github_actions/softprops/action-gh-release-2

Commit 98f9930774 by Andrew Nester, 2025-02-27 14:08:01 +00:00 (committed via GitHub)
769 changed files with 27694 additions and 4435 deletions

View File

@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc

View File

@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
 {{- end }}
 )
+{{- $excludeFromJson := list "http-request"}}
 func new{{.PascalName}}() *cobra.Command {
 	cmd := &cobra.Command{}
+	{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 	{{- if .Request}}
 	var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 	{{- if .RequestBodyField }}
 	{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 	{{- end }}
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	var {{.CamelName}}Json flags.JsonFlag
 	{{- end}}
 	{{- end}}
@@ -135,14 +138,14 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $request = .RequestBodyField.Entity -}}
 	{{- end -}}
 	{{if $request }}// TODO: short flags
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 	{{- end}}
 	{{$method := .}}
 	{{ if not .IsJsonOnly }}
-	{{range $request.Fields -}}
+	{{range .AllFields -}}
 	{{- if not .Required -}}
-	{{if .Entity.IsObject }}// TODO: complex arg: {{.Name}}
+	{{if .Entity.IsObject}}{{if not (eq . $method.RequestBodyField) }}// TODO: complex arg: {{.Name}}{{end}}
 	{{else if .Entity.IsAny }}// TODO: any: {{.Name}}
 	{{else if .Entity.ArrayValue }}// TODO: array: {{.Name}}
 	{{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}}
@@ -176,8 +179,8 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}
 	{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
 	{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
-	{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+	{{- $onlyPathArgsRequiredAsPositionalArguments := and .Request (eq (len .RequiredPositionalArguments) (len .Request.RequiredPathFields)) -}}
+	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 	{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
 	{{- $atleastOneArgumentWithDescription := false -}}
@@ -215,12 +218,12 @@ func new{{.PascalName}}() *cobra.Command {
 	cmd.Args = func(cmd *cobra.Command, args []string) error {
 		{{- if $hasDifferentArgsWithJsonFlag }}
 		if cmd.Flags().Changed("json") {
-			err := root.ExactArgs({{len $request.RequiredPathFields}})(cmd, args)
+			err := root.ExactArgs({{len .Request.RequiredPathFields}})(cmd, args)
 			if err != nil {
-				{{- if eq 0 (len $request.RequiredPathFields) }}
+				{{- if eq 0 (len .Request.RequiredPathFields) }}
 				return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide{{- range $index, $field := $request.RequiredFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
 				{{- else }}
-				return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := $request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
+				return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := .Request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
 				{{- end }}
 			}
 			return nil
@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
 		ctx := cmd.Context()
 		{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 		{{- if .Request }}
-		{{ if .CanUseJson }}
+		{{ if $canUseJson }}
 		if cmd.Flags().Changed("json") {
 			diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 			if diags.HasError() {
@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
 			return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 		}{{- end}}
 		{{- if $hasPosArgs }}
-		{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+		{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 		{{- end}}
 		{{- if $hasIdPrompt}}
 		if len(args) == 0 {
@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {
 		{{$method := .}}
 		{{- range $arg, $field := .RequiredPositionalArguments}}
-		{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
+		{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
 		{{- end -}}
-		{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+		{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 		}
 		{{- end}}
@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
 {{- $method := .Method -}}
 {{- $arg := .Arg -}}
 {{- $hasIdPrompt := .HasIdPrompt -}}
-{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
+{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
+{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
 {{- if $optionalIfJsonIsUsed }}
 if !cmd.Flags().Changed("json") {
 {{- end }}
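Editor's note: the effect of the new `$canUseJson` guard is easier to see outside the generator. Below is a minimal, self-contained Go sketch; the `list` and `in` helpers are assumptions standing in for the generator's own template function map, which is not shown in this diff.

package main

import (
	"os"
	"slices"
	"text/template"
)

func main() {
	// Helpers mirroring what the codegen template appears to rely on
	// (assumed here; the real generator registers its own FuncMap).
	funcs := template.FuncMap{
		"list": func(items ...string) []string { return items },
		"in":   func(haystack []string, needle string) bool { return slices.Contains(haystack, needle) },
	}

	const src = `{{- $excludeFromJson := list "http-request" -}}
{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName)) -}}
canUseJson={{$canUseJson}}`

	tmpl := template.Must(template.New("cmd").Funcs(funcs).Parse(src))

	// http-request is JSON-capable upstream but excluded by the new guard.
	_ = tmpl.Execute(os.Stdout, map[string]any{
		"CanUseJson": true,
		"KebabName":  "http-request",
	})
	// Output: canUseJson=false
}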

.gitattributes (vendored, 6 changes)
View File

@@ -1,11 +1,13 @@
 cmd/account/access-control/access-control.go linguist-generated=true
 cmd/account/billable-usage/billable-usage.go linguist-generated=true
+cmd/account/budget-policy/budget-policy.go linguist-generated=true
 cmd/account/budgets/budgets.go linguist-generated=true
 cmd/account/cmd.go linguist-generated=true
 cmd/account/credentials/credentials.go linguist-generated=true
 cmd/account/csp-enablement-account/csp-enablement-account.go linguist-generated=true
 cmd/account/custom-app-integration/custom-app-integration.go linguist-generated=true
 cmd/account/disable-legacy-features/disable-legacy-features.go linguist-generated=true
+cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=true
 cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
 cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
 cmd/account/federation-policy/federation-policy.go linguist-generated=true
@@ -31,6 +33,7 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
+cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true
@@ -74,6 +77,7 @@ cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
 cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
 cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
 cmd/workspace/jobs/jobs.go linguist-generated=true
+cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true
 cmd/workspace/lakeview/lakeview.go linguist-generated=true
 cmd/workspace/libraries/libraries.go linguist-generated=true
 cmd/workspace/metastores/metastores.go linguist-generated=true
@@ -98,11 +102,13 @@ cmd/workspace/providers/providers.go linguist-generated=true
 cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
 cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
 cmd/workspace/queries/queries.go linguist-generated=true
+cmd/workspace/query-execution/query-execution.go linguist-generated=true
 cmd/workspace/query-history/query-history.go linguist-generated=true
 cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
 cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
 cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
 cmd/workspace/recipients/recipients.go linguist-generated=true
+cmd/workspace/redash-config/redash-config.go linguist-generated=true
 cmd/workspace/registered-models/registered-models.go linguist-generated=true
 cmd/workspace/repos/repos.go linguist-generated=true
 cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true

.github/CODEOWNERS (vendored, 1 change)
View File

@@ -1 +1,2 @@
 * @pietern @andrewnester @shreyas-goenka @denik
+cmd/labs @alexott @nfx

View File

@@ -18,7 +18,7 @@ jobs:
       pull-requests: write
     steps:
-      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
+      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
        with:
          stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
          stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.

View File

@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

View File

@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

View File

@@ -10,19 +10,65 @@ on:
 jobs:
   publish-to-winget-pkgs:
     runs-on:
-      group: databricks-protected-runner-group
-      labels: windows-server-latest
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
     environment: release
     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
-        with:
-          identifier: Databricks.DatabricksCLI
-          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
-          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
-          fork-user: eng-dev-ecosystem-bot
+      - name: Checkout repository and submodules
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      # When updating the version of komac, make sure to update the checksum in the next step.
+      # Find both at https://github.com/russellbanks/Komac/releases.
+      - name: Download komac binary
+        run: |
+          curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
+
+      - name: Verify komac binary
+        run: |
+          echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
+
+      - name: Untar komac binary to temporary path
+        run: |
+          mkdir -p $RUNNER_TEMP/komac
+          tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
+
+      - name: Add komac to PATH
+        run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
+
+      - name: Confirm komac version
+        run: komac --version
+
       # Use the tag from the input, or the ref name if the input is not provided.
       # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
-          release-tag: ${{ inputs.tag || github.ref_name }}
+      - name: Strip "v" prefix from version
+        id: strip_version
+        run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
+
+      - name: Get URLs of signed Windows binaries
+        id: get_windows_urls
+        run: |
+          urls=$(
+            gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
+            jq -r .assets[].browser_download_url | \
+            grep -E '_windows_.*-signed\.zip$' | \
+            tr '\n' ' '
+          )
+          if [ -z "$urls" ]; then
+            echo "No signed Windows binaries found" >&2
+            exit 1
+          fi
+          echo "urls=$urls" >> "$GITHUB_OUTPUT"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to Winget
+        run: |
+          komac update Databricks.DatabricksCLI \
+            --version ${{ steps.strip_version.outputs.version }} \
+            --submit \
+            --urls ${{ steps.get_windows_urls.outputs.urls }}
+        env:
+          KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
+          GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
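Editor's note: the workflow pins komac by SHA-256 before running it. For illustration only, the equivalent check in Go (path and digest taken from the workflow above; the helper name is ours):

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// verifySHA256 mirrors the workflow's `sha256sum -c` step: hash the
// downloaded archive and compare against the pinned digest.
func verifySHA256(path, wantHex string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return err
	}
	got := hex.EncodeToString(h.Sum(nil))
	if got != wantHex {
		return fmt.Errorf("checksum mismatch: got %s, want %s", got, wantHex)
	}
	return nil
}

func main() {
	err := verifySHA256(
		os.Getenv("RUNNER_TEMP")+"/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz",
		"d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c",
	)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}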

View File

@@ -50,7 +50,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

       - name: Setup Python
         uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
@@ -71,10 +71,10 @@ jobs:
           make vendor
           pip3 install wheel

-      - name: Run tests
-        run: make test
+      - name: Run tests with coverage
+        run: make cover

-  golangci:
+  linters:
     needs: cleanups
     name: lint
     runs-on: ubuntu-latest
@@ -82,7 +82,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum
@@ -95,10 +95,15 @@ jobs:
           # Exit with status code 1 if there are differences (i.e. unformatted files)
           git diff --exit-code
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
+        uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
         with:
           version: v1.63.4
           args: --timeout=15m
+      - name: Run ruff
+        uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
+        with:
+          version: "0.9.1"
+          args: "format --check"

   validate-bundle-schema:
     needs: cleanups
@@ -111,7 +116,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum

View File

@@ -34,7 +34,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

       # The default cache key for this action considers only the `go.sum` file.
       # We include .goreleaser.yaml here to differentiate from the cache used by the push action

View File

@@ -26,7 +26,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

       # The default cache key for this action considers only the `go.sum` file.
       # We include .goreleaser.yaml here to differentiate from the cache used by the push action

.gitignore (vendored, 7 changes)
View File

@@ -20,14 +20,13 @@ dist/
 *.log
 coverage.txt
+coverage-acceptance.txt

 __pycache__
 *.pyc

-.terraform
-.terraform.lock.hcl
+.idea
 .vscode/launch.json
 .vscode/tasks.json

-.databricks
+.ruff_cache

View File

@@ -15,12 +15,20 @@ linters:
     - intrange
     - mirror
     - perfsprint
+    - unconvert
 linters-settings:
   govet:
     enable-all: true
     disable:
       - fieldalignment
       - shadow
+    settings:
+      printf:
+        funcs:
+          - (github.com/databricks/cli/internal/testutil.TestingT).Infof
+          - (github.com/databricks/cli/internal/testutil.TestingT).Errorf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Fatalf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Skipf
   gofmt:
     rewrite-rules:
       - pattern: 'a[b:len(a)]'
@@ -41,6 +49,8 @@ linters-settings:
     disable:
       # good check, but we have too many assert.(No)?Errorf? so excluding for now
       - require-error
+  copyloopvar:
+    check-alias: true
 issues:
   exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
   max-issues-per-linter: 1000
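Editor's note: the new govet `printf` entries teach `go vet` to treat the testutil helpers as printf-style functions, so malformed format strings in tests are caught at lint time. A sketch of the interface shape this configuration assumes (the real definition lives in `internal/testutil` and may differ):

package testutil

// TestingT is assumed to expose printf-style helpers; listing them under
// govet's printf settings (see config above) makes `go vet` validate their
// format strings the same way it validates fmt.Printf.
type TestingT interface {
	Infof(format string, args ...any)
	Errorf(format string, args ...any)
	Fatalf(format string, args ...any)
	Skipf(format string, args ...any)
}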

View File

@@ -1,5 +1,154 @@
 # Version changelog

+## [Release] Release v0.242.0
+
+Notable changes:
+
+Starting with this version, the CLI does not load bundle auth information when a CLI command is executed inside the bundle directory with a profile explicitly provided via the `-p` flag.
+For more details see the related GitHub issue: https://github.com/databricks/cli/issues/1358
+
+CLI:
+* Do not load host from bundle for CLI commands when profile flag is used ([#2335](https://github.com/databricks/cli/pull/2335)).
+* Fixed accessing required path parameters in CLI generation when the --json flag is used ([#2373](https://github.com/databricks/cli/pull/2373)).
+
+Bundles:
+* Provide instructions for testing in the default-python template ([#2355](https://github.com/databricks/cli/pull/2355)).
+* Remove `run_as` from the built-in templates ([#2044](https://github.com/databricks/cli/pull/2044)).
+* Change warning about incomplete permissions section into a recommendation ([#2043](https://github.com/databricks/cli/pull/2043)).
+* Refine `mode: production` diagnostic output ([#2236](https://github.com/databricks/cli/pull/2236)).
+* Support serverless mode in default-python template (explicit prompt) ([#2377](https://github.com/databricks/cli/pull/2377)).
+* Set default data_security_mode to "SINGLE_USER" in bundle templates ([#2372](https://github.com/databricks/cli/pull/2372)).
+* Fixed spark version check for clusters defined in the same bundle ([#2374](https://github.com/databricks/cli/pull/2374)).
+
+API Changes:
+* Added `databricks genie get-message-query-result-by-attachment` command.
+
+OpenAPI commit 99f644e72261ef5ecf8d74db20f4b7a1e09723cc (2025-02-11)
+
+## [Release] Release v0.241.2
+
+This is a bugfix release to address an issue where jobs with tasks that have a `libraries` section containing PyPI packages could not be deployed.
+
+Bundles:
+* Revert changes related to basename check for local libraries ([#2345](https://github.com/databricks/cli/pull/2345)).
+
+## [Release] Release v0.241.1
+
+Bundles:
+* Fix for regression deploying resources with PyPi and Maven library types ([#2341](https://github.com/databricks/cli/pull/2341)).
+
+## [Release] Release v0.241.0
+
+Bundles:
+* Added support to generate Git based jobs ([#2304](https://github.com/databricks/cli/pull/2304)).
+* Added support for run_as in pipelines ([#2287](https://github.com/databricks/cli/pull/2287)).
+* Raise an error when multiple local libraries with the same basename are used ([#2297](https://github.com/databricks/cli/pull/2297)).
+* Fix env variable for AzureCli local config ([#2248](https://github.com/databricks/cli/pull/2248)).
+* Accept JSON files in includes section ([#2265](https://github.com/databricks/cli/pull/2265)).
+* Always print warnings and errors; clean up format ([#2213](https://github.com/databricks/cli/pull/2213)).
+
+API Changes:
+* Added `databricks account budget-policy` command group.
+* Added `databricks lakeview-embedded` command group.
+* Added `databricks query-execution` command group.
+* Added `databricks account enable-ip-access-lists` command group.
+* Added `databricks redash-config` command group.
+
+OpenAPI commit c72c58f97b950fcb924a90ef164bcb10cfcd5ece (2025-02-03)
+
+Dependency updates:
+* Upgrade to TF provider 1.65.1 ([#2328](https://github.com/databricks/cli/pull/2328)).
+* Bump github.com/hashicorp/terraform-exec from 0.21.0 to 0.22.0 ([#2237](https://github.com/databricks/cli/pull/2237)).
+* Bump github.com/spf13/pflag from 1.0.5 to 1.0.6 ([#2281](https://github.com/databricks/cli/pull/2281)).
+* Bump github.com/databricks/databricks-sdk-go from 0.56.1 to 0.57.0 ([#2321](https://github.com/databricks/cli/pull/2321)).
+* Bump golang.org/x/oauth2 from 0.25.0 to 0.26.0 ([#2322](https://github.com/databricks/cli/pull/2322)).
+* Bump golang.org/x/term from 0.28.0 to 0.29.0 ([#2325](https://github.com/databricks/cli/pull/2325)).
+* Bump golang.org/x/text from 0.21.0 to 0.22.0 ([#2323](https://github.com/databricks/cli/pull/2323)).
+* Bump golang.org/x/mod from 0.22.0 to 0.23.0 ([#2324](https://github.com/databricks/cli/pull/2324)).
+
+## [Release] Release v0.240.0
+
+Bundles:
+* Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
+* Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
+* Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
+* libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
+
+API Changes:
+* Added `databricks access-control` command group.
+* Added `databricks serving-endpoints http-request` command.
+* Changed `databricks serving-endpoints create` command with new required argument order.
+* Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
+* Changed `databricks recipients update` command return type to become non-empty.
+
+OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
+
+Dependency updates:
+* Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
+* Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
+
+## [Release] Release v0.239.1
+
+CLI:
+* Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
+* Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
+
+Bundles:
+* Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
+* Show an error when non-yaml files are used in the include section ([#2201](https://github.com/databricks/cli/pull/2201)).
+* Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
+* fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
+* Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
+* Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
+* Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
+* Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
+* Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
+* Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
+
+## [Release] Release v0.239.0
+
+### New feature announcement
+
+#### Databricks Apps support
+
+You can now manage Databricks Apps using DABs by defining an `app` resource in your bundle configuration.
+For more information see the Databricks documentation: https://docs.databricks.com/en/dev-tools/bundles/resources.html#app
+
+#### Referencing complex variables in complex variables
+
+You can now reference complex variables within other complex variables.
+For more details see https://github.com/databricks/cli/pull/2157
+
+CLI:
+* Filter out system clusters in cluster picker ([#2131](https://github.com/databricks/cli/pull/2131)).
+* Add command line flags for fields that are not in the API request body ([#2155](https://github.com/databricks/cli/pull/2155)).
+
+Bundles:
+* Added support for Databricks Apps in DABs ([#1928](https://github.com/databricks/cli/pull/1928)).
+* Allow artifact path to be located outside the sync root ([#2128](https://github.com/databricks/cli/pull/2128)).
+* Retry app deployment if there is an active deployment in progress ([#2153](https://github.com/databricks/cli/pull/2153)).
+* Resolve variables in a loop ([#2164](https://github.com/databricks/cli/pull/2164)).
+* Improve resolution of complex variables within complex variables ([#2157](https://github.com/databricks/cli/pull/2157)).
+* Added output message to warn about slower deployments with apps ([#2161](https://github.com/databricks/cli/pull/2161)).
+* Patch references to UC schemas to capture dependencies automatically ([#1989](https://github.com/databricks/cli/pull/1989)).
+* Format default-python template ([#2110](https://github.com/databricks/cli/pull/2110)).
+* Encourage the use of root_path in production to ensure single deployment ([#1712](https://github.com/databricks/cli/pull/1712)).
+* Log warnings to stderr for "bundle validate -o json" ([#2109](https://github.com/databricks/cli/pull/2109)).
+
+API Changes:
+* Changed `databricks account federation-policy update` command with new required argument order.
+* Changed `databricks account service-principal-federation-policy update` command with new required argument order.
+
+OpenAPI commit 779817ed8d63031f5ea761fbd25ee84f38feec0d (2025-01-08)
+
+Dependency updates:
+* Upgrade TF provider to 1.63.0 ([#2162](https://github.com/databricks/cli/pull/2162)).
+* Bump golangci-lint version to v1.63.4 from v1.63.1 ([#2114](https://github.com/databricks/cli/pull/2114)).
+* Bump astral-sh/setup-uv from 4 to 5 ([#2116](https://github.com/databricks/cli/pull/2116)).
+* Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 ([#2080](https://github.com/databricks/cli/pull/2080)).
+* Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 ([#2079](https://github.com/databricks/cli/pull/2079)).
+* Bump golang.org/x/term from 0.27.0 to 0.28.0 ([#2078](https://github.com/databricks/cli/pull/2078)).
+* Bump github.com/databricks/databricks-sdk-go from 0.54.0 to 0.55.0 ([#2126](https://github.com/databricks/cli/pull/2126)).
+
 ## [Release] Release v0.238.0

 Bundles:

View File

@@ -1,12 +1,18 @@
-default: build
+default: vendor fmt lint tidy

 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .

 GOTESTSUM_FORMAT ?= pkgname-and-test-fails
+GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped

 lint:
 	golangci-lint run --fix

+tidy:
+	@# not part of golangci-lint, apparently
+	go mod tidy
+
 lintcheck:
 	golangci-lint run ./...
@@ -14,17 +20,26 @@ lintcheck:
 # formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
 # If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
 fmt:
+	ruff format -q
 	golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...

 test:
-	gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped -- ${PACKAGES}
+	${GOTESTSUM_CMD} -- ${PACKAGES}

 cover:
-	gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped -- -coverprofile=coverage.txt ${PACKAGES}
+	rm -fr ./acceptance/build/cover/
+	VERBOSE_TEST=1 CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
+	rm -fr ./acceptance/build/cover-merged/
+	mkdir -p acceptance/build/cover-merged/
+	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/
+	go tool covdata textfmt -i acceptance/build/cover-merged -o coverage-acceptance.txt

 showcover:
 	go tool cover -html=coverage.txt

+acc-showcover:
+	go tool cover -html=coverage-acceptance.txt
+
 build: vendor
 	go build -mod vendor
@@ -37,12 +52,15 @@ vendor:
 schema:
 	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json

-INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+docs:
+	go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen

-integration:
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./acceptance ./integration/..." -- -parallel 4 -timeout=2h
+
+integration: vendor
 	$(INTEGRATION)

-integration-short:
-	$(INTEGRATION) -short
+integration-short: vendor
+	VERBOSE_TEST=1 $(INTEGRATION) -short

-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short
+.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

NOTICE (13 changes)
View File

@@ -105,3 +105,16 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
 https://github.com/hexops/gotextdiff
 Copyright (c) 2009 The Go Authors. All rights reserved.
 License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
+
+https://github.com/BurntSushi/toml
+Copyright (c) 2013 TOML authors
+https://github.com/BurntSushi/toml/blob/master/COPYING
+
+dario.cat/mergo
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+https://github.com/darccio/mergo/blob/master/LICENSE
+
+https://github.com/gorilla/mux
+Copyright (c) 2023 The Gorilla Authors. All rights reserved.
+https://github.com/gorilla/mux/blob/main/LICENSE

acceptance/.gitignore (vendored, new file, 1 change)
View File

@@ -0,0 +1 @@
+build

View File

@@ -17,3 +17,5 @@ For more complex tests one can also use:
 - `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
+
+See [selftest](./selftest) for a toy test.

View File

@@ -1,31 +1,66 @@
 package acceptance_test

 import (
+	"context"
+	"encoding/json"
 	"errors"
+	"flag"
 	"fmt"
 	"io"
+	"net/http"
 	"os"
 	"os/exec"
 	"path/filepath"
+	"regexp"
 	"runtime"
 	"slices"
 	"sort"
 	"strings"
 	"testing"
 	"time"
+	"unicode/utf8"

+	"github.com/google/uuid"
+
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/cli/libs/testdiff"
+	"github.com/databricks/cli/libs/testserver"
+	"github.com/databricks/databricks-sdk-go"
+	"github.com/databricks/databricks-sdk-go/service/iam"
+	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )

-var KeepTmp = os.Getenv("KEEP_TMP") != ""
+var (
+	KeepTmp     bool
+	NoRepl      bool
+	VerboseTest bool = os.Getenv("VERBOSE_TEST") != ""
+)
+
+// In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
+// Then install your breakpoints and click "debug test" near TestAccept in VSCODE.
+// example: var SingleTest = "bundle/variables/empty"
+var SingleTest = ""
+
+// If enabled, instead of compiling and running CLI externally, we'll start in-process server that accepts and runs
+// CLI commands. The $CLI in test scripts is a helper that just forwards command-line arguments to this server (see bin/callserver.py).
+// Also disables parallelism in tests.
+var InprocessMode bool
+
+func init() {
+	flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
+	flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
+	flag.BoolVar(&NoRepl, "norepl", false, "Do not apply any replacements (for debugging)")
+}

 const (
 	EntryPointScript = "script"
 	CleanupScript    = "script.cleanup"
 	PrepareScript    = "script.prepare"
+	MaxFileSize      = 100_000
+
+	// Filename to save replacements to (used by diff.py)
+	ReplsFile = "repls.json"
 )
@@ -34,38 +69,122 @@ var Scripts = map[string]bool{
 	PrepareScript: true,
 }

+var Ignored = map[string]bool{
+	ReplsFile: true,
+}
+
 func TestAccept(t *testing.T) {
+	testAccept(t, InprocessMode, SingleTest)
+}
+
+func TestInprocessMode(t *testing.T) {
+	if InprocessMode {
+		t.Skip("Already tested by TestAccept")
+	}
+	require.Equal(t, 1, testAccept(t, true, "selftest/basic"))
+	require.Equal(t, 1, testAccept(t, true, "selftest/server"))
+}
+
+func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
+	repls := testdiff.ReplacementsContext{}
 	cwd, err := os.Getwd()
 	require.NoError(t, err)

-	execPath := BuildCLI(t, cwd)
+	buildDir := filepath.Join(cwd, "build", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
+
+	// Download terraform and provider and create config; this also creates build directory.
+	RunCommand(t, []string{"python3", filepath.Join(cwd, "install_terraform.py"), "--targetdir", buildDir}, ".")
+
+	coverDir := os.Getenv("CLI_GOCOVERDIR")
+	if coverDir != "" {
+		require.NoError(t, os.MkdirAll(coverDir, os.ModePerm))
+		coverDir, err = filepath.Abs(coverDir)
+		require.NoError(t, err)
+		t.Logf("Writing coverage to %s", coverDir)
+	}
+
+	execPath := ""
+	if InprocessMode {
+		cmdServer := StartCmdServer(t)
+		t.Setenv("CMD_SERVER_URL", cmdServer.URL)
+		execPath = filepath.Join(cwd, "bin", "callserver.py")
+	} else {
+		execPath = BuildCLI(t, buildDir, coverDir)
+	}
+
 	// $CLI is what test scripts are using
 	t.Setenv("CLI", execPath)
+	repls.SetPath(execPath, "[CLI]")

 	// Make helper scripts available
 	t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))

-	server := StartServer(t)
-	AddHandlers(server)
-	// Redirect API access to local server:
-	t.Setenv("DATABRICKS_HOST", fmt.Sprintf("http://127.0.0.1:%d", server.Port))
-	t.Setenv("DATABRICKS_TOKEN", "dapi1234")
+	tempHomeDir := t.TempDir()
+	repls.SetPath(tempHomeDir, "[TMPHOME]")
+	t.Logf("$TMPHOME=%v", tempHomeDir)
+
+	// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
+	uvCache := getUVDefaultCacheDir(t)
+	t.Setenv("UV_CACHE_DIR", uvCache)
+
+	cloudEnv := os.Getenv("CLOUD_ENV")
+	if cloudEnv == "" {
+		defaultServer := testserver.New(t)
+		AddHandlers(defaultServer)
+		t.Setenv("DATABRICKS_DEFAULT_HOST", defaultServer.URL)

-	homeDir := t.TempDir()
-	// Do not read user's ~/.databrickscfg
-	t.Setenv(env.HomeEnvVar(), homeDir)
+		homeDir := t.TempDir()
+		// Do not read user's ~/.databrickscfg
+		t.Setenv(env.HomeEnvVar(), homeDir)
+	}

-	repls := testdiff.ReplacementsContext{}
-	repls.Set(execPath, "$CLI")
+	terraformrcPath := filepath.Join(buildDir, ".terraformrc")
+	t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
+	t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
+	repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
+
+	terraformExecPath := filepath.Join(buildDir, "terraform")
+	if runtime.GOOS == "windows" {
+		terraformExecPath += ".exe"
+	}
+	t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
+	t.Setenv("TERRAFORM", terraformExecPath)
+	repls.SetPath(terraformExecPath, "[TERRAFORM]")
+
+	// do it last so that full paths match first:
+	repls.SetPath(buildDir, "[BUILD_DIR]")
+
+	testdiff.PrepareReplacementsDevVersion(t, &repls)
+	testdiff.PrepareReplacementSdkVersion(t, &repls)
+	testdiff.PrepareReplacementsGoVersion(t, &repls)
+
+	repls.SetPath(cwd, "[TESTROOT]")
+
+	repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})

 	testDirs := getTests(t)
 	require.NotEmpty(t, testDirs)

+	if singleTest != "" {
+		testDirs = slices.DeleteFunc(testDirs, func(n string) bool {
+			return n != singleTest
+		})
+		require.NotEmpty(t, testDirs, "singleTest=%#v did not match any tests\n%#v", singleTest, testDirs)
+	}
+
 	for _, dir := range testDirs {
 		t.Run(dir, func(t *testing.T) {
-			t.Parallel()
-			runTest(t, dir, repls)
+			if !InprocessMode {
+				t.Parallel()
+			}
+			runTest(t, dir, coverDir, repls.Clone())
 		})
 	}
+
+	return len(testDirs)
 }
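Editor's note: the InprocessMode wiring above depends on a small command server plus the bin/callserver.py forwarder, neither of which appears in this diff. Purely as a toy illustration of the idea (every name below is hypothetical, not the repository's StartCmdServer): the test binary listens on localhost, the forwarder posts argv as JSON, and the server returns the output plus an exit code to propagate.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// runResult carries what a forwarder script would need: the combined
// output to print and the exit code to exit with.
type runResult struct {
	Output   string `json:"output"`
	ExitCode int    `json:"exitcode"`
}

func main() {
	// Stand-in for dispatching into the CLI's root command in-process.
	run := func(args []string) runResult {
		return runResult{Output: fmt.Sprintf("ran %v\n", args), ExitCode: 0}
	}

	http.HandleFunc("/run", func(w http.ResponseWriter, r *http.Request) {
		var args []string
		if err := json.NewDecoder(r.Body).Decode(&args); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		_ = json.NewEncoder(w).Encode(run(args))
	})

	// A real harness would publish this address via CMD_SERVER_URL.
	_ = http.ListenAndServe("127.0.0.1:7070", nil)
}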
@@ -78,7 +197,8 @@ func getTests(t *testing.T) []string {
 		name := filepath.Base(path)
 		if name == EntryPointScript {
 			// Presence of 'script' marks a test case in this directory
-			testDirs = append(testDirs, filepath.Dir(path))
+			testName := filepath.ToSlash(filepath.Dir(path))
+			testDirs = append(testDirs, testName)
 		}
 		return nil
 	})
@@ -88,7 +208,23 @@ func getTests(t *testing.T) []string {
 	return testDirs
 }

-func runTest(t *testing.T, dir string, repls testdiff.ReplacementsContext) {
+func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
+	config, configPath := LoadConfig(t, dir)
+
+	isEnabled, isPresent := config.GOOS[runtime.GOOS]
+	if isPresent && !isEnabled {
+		t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
+	}
+
+	cloudEnv := os.Getenv("CLOUD_ENV")
+	if !isTruePtr(config.Local) && cloudEnv == "" {
+		t.Skipf("Disabled via Local setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
+	}
+
+	if !isTruePtr(config.Cloud) && cloudEnv != "" {
+		t.Skipf("Disabled via Cloud setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
+	}
+
 	var tmpDir string
 	var err error
 	if KeepTmp {
@@ -101,6 +237,8 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 		tmpDir = t.TempDir()
 	}

+	repls.SetPathWithParents(tmpDir, "[TMPDIR]")
+
 	scriptContents := readMergedScriptContents(t, dir)
 	testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
@@ -111,89 +249,238 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 	args := []string{"bash", "-euo", "pipefail", EntryPointScript}
 	cmd := exec.Command(args[0], args[1:]...)
-	cmd.Dir = tmpDir
-	outB, err := cmd.CombinedOutput()
-	out := formatOutput(string(outB), err)
-	out = repls.Replace(out)
-	doComparison(t, filepath.Join(dir, "output.txt"), "script output", out)
-
-	for key := range outputs {
-		if key == "output.txt" {
-			// handled above
-			continue
-		}
-		pathNew := filepath.Join(tmpDir, key)
-		newValBytes, err := os.ReadFile(pathNew)
-		if err != nil {
-			if errors.Is(err, os.ErrNotExist) {
-				t.Errorf("%s: expected to find this file but could not (%s)", key, tmpDir)
-			} else {
-				t.Errorf("%s: could not read: %s", key, err)
-			}
-			continue
-		}
-		pathExpected := filepath.Join(dir, key)
-		newVal := repls.Replace(string(newValBytes))
-		doComparison(t, pathExpected, pathNew, newVal)
-	}
+	cmd.Env = os.Environ()
+
+	var workspaceClient *databricks.WorkspaceClient
+	var user iam.User
+
+	// Start a new server with a custom configuration if the acceptance test
+	// specifies a custom server stubs.
+	var server *testserver.Server
+
+	if cloudEnv == "" {
+		// Start a new server for this test if either:
+		// 1. A custom server spec is defined in the test configuration.
+		// 2. The test is configured to record requests and assert on them. We need
+		//    a duplicate of the default server to record requests because the default
+		//    server otherwise is a shared resource.
+
+		databricksLocalHost := os.Getenv("DATABRICKS_DEFAULT_HOST")
+
+		if len(config.Server) > 0 || isTruePtr(config.RecordRequests) {
+			server = testserver.New(t)
+
+			if isTruePtr(config.RecordRequests) {
+				requestsPath := filepath.Join(tmpDir, "out.requests.txt")
+				server.RecordRequestsCallback = func(request *testserver.Request) {
+					req := getLoggedRequest(request, config.IncludeRequestHeaders)
+					reqJson, err := json.MarshalIndent(req, "", "  ")
+					assert.NoErrorf(t, err, "Failed to indent: %#v", req)
+
+					reqJsonWithRepls := repls.Replace(string(reqJson))
+
+					f, err := os.OpenFile(requestsPath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
+					assert.NoError(t, err)
+					defer f.Close()
+
+					_, err = f.WriteString(reqJsonWithRepls + "\n")
+					assert.NoError(t, err)
+				}
+			}
+
+			// We want later stubs takes precedence, because then leaf configs take precedence over parent directory configs
+			// In gorilla/mux earlier handlers take precedence, so we need to reverse the order
+			slices.Reverse(config.Server)
+
+			for _, stub := range config.Server {
+				require.NotEmpty(t, stub.Pattern)
+				items := strings.Split(stub.Pattern, " ")
+				require.Len(t, items, 2)
+				server.Handle(items[0], items[1], func(req testserver.Request) any {
+					return stub.Response
+				})
+			}
+
+			// The earliest handlers take precedence, add default handlers last
+			AddHandlers(server)
+			databricksLocalHost = server.URL
+		}
+
+		// Each local test should use a new token that will result into a new fake workspace,
+		// so that test don't interfere with each other.
+		tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
+		config := databricks.Config{
+			Host:  databricksLocalHost,
+			Token: "dbapi" + tokenSuffix,
+		}
+		workspaceClient, err = databricks.NewWorkspaceClient(&config)
+		require.NoError(t, err)
+
+		cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+config.Host)
+		cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+config.Token)
+
+		// For the purposes of replacements, use testUser.
+		// Note, users might have overriden /api/2.0/preview/scim/v2/Me but that should not affect the replacement:
+		user = testUser
+	} else {
+		// Use whatever authentication mechanism is configured by the test runner.
+		workspaceClient, err = databricks.NewWorkspaceClient(&databricks.Config{})
+		require.NoError(t, err)
+		pUser, err := workspaceClient.CurrentUser.Me(context.Background())
+		require.NoError(t, err, "Failed to get current user")
+		user = *pUser
+	}
+
+	testdiff.PrepareReplacementsUser(t, &repls, user)
+	testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
+
+	// Must be added PrepareReplacementsUser, otherwise conflicts with [USERNAME]
+	testdiff.PrepareReplacementsUUID(t, &repls)
+
+	// User replacements come last:
+	repls.Repls = append(repls.Repls, config.Repls...)
+
+	// Save replacements to temp test directory so that it can be read by diff.py
+	replsJson, err := json.MarshalIndent(repls.Repls, "", "  ")
+	require.NoError(t, err)
+	testutil.WriteFile(t, filepath.Join(tmpDir, ReplsFile), string(replsJson))
+
+	if coverDir != "" {
+		// Creating individual coverage directory for each test, because writing to the same one
+		// results in sporadic failures like this one (only if tests are running in parallel):
+		// +error: coverage meta-data emit failed: writing ... rename .../tmp.covmeta.b3f... .../covmeta.b3f2c...: no such file or directory
+		coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
+		err := os.MkdirAll(coverDir, os.ModePerm)
+		require.NoError(t, err)
+		cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
+	}
+
+	absDir, err := filepath.Abs(dir)
+	require.NoError(t, err)
+	cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
+
+	// Write combined output to a file
+	out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
+	require.NoError(t, err)
+	cmd.Stdout = out
+	cmd.Stderr = out
+	cmd.Dir = tmpDir
+	err = cmd.Run()
+
+	// Include exit code in output (if non-zero)
+	formatOutput(out, err)
+	require.NoError(t, out.Close())
+
+	printedRepls := false
+
+	// Compare expected outputs
+	for relPath := range outputs {
+		doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
+	}

 	// Make sure there are not unaccounted for new files
-	files, err := os.ReadDir(tmpDir)
-	require.NoError(t, err)
-
-	for _, f := range files {
-		name := f.Name()
-		if _, ok := inputs[name]; ok {
+	files := ListDir(t, tmpDir)
+	unexpected := []string{}
+	for _, relPath := range files {
+		if _, ok := inputs[relPath]; ok {
 			continue
 		}
-		if _, ok := outputs[name]; ok {
+		if _, ok := outputs[relPath]; ok {
 			continue
 		}
-		t.Errorf("Unexpected output: %s", f)
-		if strings.HasPrefix(name, "out") {
+		if _, ok := Ignored[relPath]; ok {
+			continue
+		}
+		unexpected = append(unexpected, relPath)
+		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
 			// Show the contents & support overwrite mode for it:
-			pathNew := filepath.Join(tmpDir, name)
-			newVal := testutil.ReadFile(t, pathNew)
-			newVal = repls.Replace(newVal)
-			doComparison(t, filepath.Join(dir, name), filepath.Join(tmpDir, name), newVal)
+			doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 		}
 	}
+
+	if len(unexpected) > 0 {
+		t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
+	}
 }

-func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {
-	valueNew = testdiff.NormalizeNewlines(valueNew)
-	valueExpected := string(readIfExists(t, pathExpected))
-	valueExpected = testdiff.NormalizeNewlines(valueExpected)
-	testdiff.AssertEqualTexts(t, pathExpected, pathNew, valueExpected, valueNew)
-	if testdiff.OverwriteMode {
-		if valueNew != "" {
-			t.Logf("Overwriting: %s", pathExpected)
-			testutil.WriteFile(t, pathExpected, valueNew)
-		} else {
-			t.Logf("Removing: %s", pathExpected)
-			_ = os.Remove(pathExpected)
-		}
-	}
-}
+func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
+	pathRef := filepath.Join(dirRef, relPath)
+	pathNew := filepath.Join(dirNew, relPath)
+	bufRef, okRef := tryReading(t, pathRef)
+	bufNew, okNew := tryReading(t, pathNew)
+	if !okRef && !okNew {
+		t.Errorf("Both files are missing or have errors: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
+		return
+	}
+
+	valueRef := testdiff.NormalizeNewlines(bufRef)
+	valueNew := testdiff.NormalizeNewlines(bufNew)
+
+	// Apply replacements to the new value only.
+	// The reference value is stored after applying replacements.
+	if !NoRepl {
+		valueNew = repls.Replace(valueNew)
+	}
+
+	// The test did not produce an expected output file.
+	if okRef && !okNew {
+		t.Errorf("Missing output file: %s", relPath)
+		if testdiff.OverwriteMode {
+			t.Logf("Removing output file: %s", relPath)
+			require.NoError(t, os.Remove(pathRef))
+		}
+		return
+	}
+
+	// The test produced an unexpected output file.
+	if !okRef && okNew {
+		t.Errorf("Unexpected output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
+		testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
+		if testdiff.OverwriteMode {
+			t.Logf("Writing output file: %s", relPath)
+			testutil.WriteFile(t, pathRef, valueNew)
+		}
+		return
+	}
+
+	// Compare the reference and new values.
+	equal := testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
+	if !equal && testdiff.OverwriteMode {
+		t.Logf("Overwriting existing output file: %s", relPath)
+		testutil.WriteFile(t, pathRef, valueNew)
+	}
+
+	if VerboseTest && !equal && printedRepls != nil && !*printedRepls {
+		*printedRepls = true
+		var items []string
+		for _, item := range repls.Repls {
+			items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
+		}
+		t.Log("Available replacements:\n" + strings.Join(items, "\n"))
+	}
+}
 // Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
 // Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
 func readMergedScriptContents(t *testing.T, dir string) string {
 	scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))

+	// Wrap script contents in a subshell such that changing the working
+	// directory only affects the main script and not cleanup.
+	scriptContents = "(\n" + scriptContents + ")\n"
+
 	prepares := []string{}
 	cleanups := []string{}

 	for {
-		x := readIfExists(t, filepath.Join(dir, CleanupScript))
-		if len(x) > 0 {
-			cleanups = append(cleanups, string(x))
+		x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
+		if ok {
+			cleanups = append(cleanups, x)
 		}

-		x = readIfExists(t, filepath.Join(dir, PrepareScript))
-		if len(x) > 0 {
-			prepares = append(prepares, string(x))
+		x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
+		if ok {
+			prepares = append(prepares, x)
 		}

 		if dir == "" || dir == "." {
@ -210,28 +497,30 @@ func readMergedScriptContents(t *testing.T, dir string) string {
return strings.Join(prepares, "\n") return strings.Join(prepares, "\n")
} }
-func BuildCLI(t *testing.T, cwd string) string {
-	execPath := filepath.Join(cwd, "build", "databricks")
+func BuildCLI(t *testing.T, buildDir, coverDir string) string {
+	execPath := filepath.Join(buildDir, "databricks")
	if runtime.GOOS == "windows" {
		execPath += ".exe"
	}

-	start := time.Now()
-	args := []string{"go", "build", "-mod", "vendor", "-o", execPath}
-	cmd := exec.Command(args[0], args[1:]...)
-	cmd.Dir = ".."
-	out, err := cmd.CombinedOutput()
-	elapsed := time.Since(start)
-	t.Logf("%s took %s", args, elapsed)
-	require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
-	if len(out) > 0 {
-		t.Logf("go build output: %s: %s", args, out)
-	}
-
-	// Quick check + warm up cache:
-	cmd = exec.Command(execPath, "--version")
-	out, err = cmd.CombinedOutput()
-	require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
+	args := []string{
+		"go", "build",
+		"-mod", "vendor",
+		"-o", execPath,
+	}
+
+	if coverDir != "" {
+		args = append(args, "-cover")
+	}
+
+	if runtime.GOOS == "windows" {
+		// Get this error on my local Windows:
+		// error obtaining VCS status: exit status 128
+		// Use -buildvcs=false to disable VCS stamping.
+		args = append(args, "-buildvcs=false")
+	}
+
+	RunCommand(t, args, "..")
	return execPath
}
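The new coverDir parameter hooks into Go's binary coverage support: from Go 1.20 on, a binary built with "go build -cover" writes coverage counters into the directory named by the GOCOVERDIR environment variable when it exits. A hedged sketch of how a caller might run such a binary; runInstrumented and the paths in main are illustrative, not the harness's actual wiring:

package main

import (
	"log"
	"os"
	"os/exec"
)

// A sketch, not the harness's actual wiring: a binary built with
// "go build -cover" (Go 1.20+) writes coverage counters into the
// directory named by GOCOVERDIR when it exits. Counters can be
// summarized afterwards with: go tool covdata percent -i=<dir>
func runInstrumented(execPath, coverDir string) error {
	cmd := exec.Command(execPath, "--version")
	cmd.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
	out, err := cmd.CombinedOutput()
	log.Printf("%s --version: %s", execPath, out)
	return err
}

func main() {
	// Illustrative paths; in the harness these would come from BuildCLI.
	if err := runInstrumented("./build/databricks", "./covdata"); err != nil {
		log.Fatal(err)
	}
}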
@ -252,29 +541,45 @@ func copyFile(src, dst string) error {
	return err
}
-func formatOutput(out string, err error) string {
+func formatOutput(w io.Writer, err error) {
	if err == nil {
-		return out
+		return
	}
	if exiterr, ok := err.(*exec.ExitError); ok {
		exitCode := exiterr.ExitCode()
-		out += fmt.Sprintf("\nExit code: %d\n", exitCode)
+		fmt.Fprintf(w, "\nExit code: %d\n", exitCode)
	} else {
-		out += fmt.Sprintf("\nError: %s\n", err)
+		fmt.Fprintf(w, "\nError: %s\n", err)
	}
-	return out
}

-func readIfExists(t *testing.T, path string) []byte {
-	data, err := os.ReadFile(path)
-	if err == nil {
-		return data
-	}
-	if !errors.Is(err, os.ErrNotExist) {
-		t.Fatalf("%s: %s", path, err)
-	}
-	return []byte{}
-}
+func tryReading(t *testing.T, path string) (string, bool) {
+	info, err := os.Stat(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			t.Errorf("%s: %s", path, err)
+		}
+		return "", false
+	}
+
+	if info.Size() > MaxFileSize {
+		t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+		return "", false
+	}
+
+	data, err := os.ReadFile(path)
+	if err != nil {
+		// already checked ErrNotExist above
+		t.Errorf("%s: %s", path, err)
+		return "", false
+	}
+
+	if !utf8.Valid(data) {
+		t.Errorf("%s: not valid utf-8", path)
+		return "", false
+	}
+	return string(data), true
+}
func CopyDir(src, dst string, inputs, outputs map[string]bool) error {

@ -289,8 +594,10 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
		return err
	}

-	if strings.HasPrefix(name, "out") {
+	if strings.HasPrefix(relPath, "out") {
+		if !info.IsDir() {
			outputs[relPath] = true
+		}
		return nil
	} else {
		inputs[relPath] = true

@ -309,3 +616,98 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
		return copyFile(path, destPath)
	})
}
func ListDir(t *testing.T, src string) []string {
var files []string
err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
if err != nil {
// Do not FailNow here.
// The output comparison happens after this call and includes output.txt, which
// records errors printed by commands, including the reason why a given file could not be read.
t.Errorf("Error when listing %s: path=%s: %s", src, path, err)
return nil
}
if info.IsDir() {
return nil
}
relPath, err := filepath.Rel(src, path)
if err != nil {
return err
}
files = append(files, relPath)
return nil
})
if err != nil {
t.Errorf("Failed to list %s: %s", src, err)
}
return files
}
func getUVDefaultCacheDir(t *testing.T) string {
// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
// the default cache directory is
// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
cacheDir, err := os.UserCacheDir()
require.NoError(t, err)
if runtime.GOOS == "windows" {
return cacheDir + "\\uv\\cache"
} else {
return cacheDir + "/uv"
}
}
func RunCommand(t *testing.T, args []string, dir string) {
start := time.Now()
cmd := exec.Command(args[0], args[1:]...)
cmd.Dir = dir
out, err := cmd.CombinedOutput()
elapsed := time.Since(start)
t.Logf("%s took %s", args, elapsed)
require.NoError(t, err, "%s failed: %s\n%s", args, err, out)
if len(out) > 0 {
t.Logf("%s output: %s", args, out)
}
}
type LoggedRequest struct {
Headers http.Header `json:"headers,omitempty"`
Method string `json:"method"`
Path string `json:"path"`
Body any `json:"body,omitempty"`
RawBody string `json:"raw_body,omitempty"`
}
func getLoggedRequest(req *testserver.Request, includedHeaders []string) LoggedRequest {
result := LoggedRequest{
Method: req.Method,
Path: req.URL.Path,
Headers: filterHeaders(req.Headers, includedHeaders),
}
if json.Valid(req.Body) {
result.Body = json.RawMessage(req.Body)
} else {
result.RawBody = string(req.Body)
}
return result
}
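getLoggedRequest keeps recorded requests readable: a body that is valid JSON is embedded verbatim via json.RawMessage, while anything else (for example a form-encoded OAuth token request) falls back to raw_body. A small self-contained sketch of that split; the loggedRequest struct is a local mirror for illustration:

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of the logged-request shape above, for illustration only.
type loggedRequest struct {
	Body    any    `json:"body,omitempty"`
	RawBody string `json:"raw_body,omitempty"`
}

func logBody(body []byte) loggedRequest {
	var r loggedRequest
	if json.Valid(body) {
		// RawMessage embeds the bytes verbatim instead of re-encoding them.
		r.Body = json.RawMessage(body)
	} else {
		r.RawBody = string(body)
	}
	return r
}

func main() {
	for _, body := range [][]byte{
		[]byte(`{"name": "test-job"}`),
		[]byte(`grant_type=client_credentials&scope=all-apis`),
	} {
		out, _ := json.Marshal(logBody(body))
		fmt.Println(string(out))
	}
	// {"body":{"name": "test-job"}}
	// {"raw_body":"grant_type=client_credentials\u0026scope=all-apis"}
}

Go's encoder HTML-escapes & to \u0026 by default, which is why form-encoded raw bodies appear that way in the recorded requests below.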
func filterHeaders(h http.Header, includedHeaders []string) http.Header {
headers := make(http.Header)
for k, v := range h {
if !slices.Contains(includedHeaders, k) {
continue
}
headers[k] = v
}
return headers
}
func isTruePtr(value *bool) bool {
return value != nil && *value
}

View File

@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST
[profile_name]
host = https://test@non-existing-subdomain.databricks.com

View File

@ -0,0 +1,14 @@
bundle:
name: test-auth
workspace:
host: $DATABRICKS_HOST
targets:
dev:
default: true
workspace:
host: $DATABRICKS_HOST
prod:
workspace:
host: https://bar.com

View File

@ -0,0 +1,91 @@
=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"
=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"
=== Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle.
>>> errcode [CLI] current-user me -p profile_name
Error: Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)
Exit code: 1
=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Exit code: 1
=== Bundle commands load bundle configuration when no flags, validation OK
>>> errcode [CLI] bundle validate
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -t flag, validation OK
>>> errcode [CLI] bundle validate -t dev
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -p flag, validation not OK (profile host doesn't match bundle host)
>>> errcode [CLI] bundle validate -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
Found 1 error
Exit code: 1
=== Bundle commands load bundle configuration with -t and -p flag, validation OK (profile host matches bundle host)
>>> errcode [CLI] bundle validate -t dev -p DEFAULT
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host doesn't match bundle host)
>>> errcode [CLI] bundle validate -t prod -p DEFAULT
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host [DATABRICKS_TARGET], but CLI configured to use https://bar.com
Name: test-auth
Target: prod
Workspace:
Host: https://bar.com
Found 1 error
Exit code: 1
=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"
=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"

View File

@ -0,0 +1,45 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg
host=$DATABRICKS_HOST
unset DATABRICKS_HOST
title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName
title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
title "Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle."
trace errcode $CLI current-user me -p profile_name | jq .userName
title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name
title "Bundle commands load bundle configuration when no flags, validation OK"
trace errcode $CLI bundle validate
title "Bundle commands load bundle configuration with -t flag, validation OK"
trace errcode $CLI bundle validate -t dev
title "Bundle commands load bundle configuration with -p flag, validation not OK (profile host doesn't match bundle host)"
trace errcode $CLI bundle validate -p profile_name
title "Bundle commands load bundle configuration with -t and -p flag, validation OK (profile host matches bundle host)"
trace errcode $CLI bundle validate -t dev -p DEFAULT
title "Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host doesn't match bundle host)"
trace errcode $CLI bundle validate -t prod -p DEFAULT
cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName
title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName

View File

@ -0,0 +1,14 @@
# Some of the clouds have the DATABRICKS_HOST variable set up without the https:// prefix.
# As a result, output would be replaced with the DATABRICKS_URL variable instead of DATABRICKS_HOST.
# As a workaround, normalize both DATABRICKS_HOST and DATABRICKS_URL to DATABRICKS_TARGET.
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_TARGET'
[[Repls]]
Old='DATABRICKS_URL'
New='DATABRICKS_TARGET'
[[Repls]]
Old='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": .*'
New='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)'

View File

@ -0,0 +1,12 @@
{
"headers": {
"Authorization": [
"Basic [ENCODED_AUTH]"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/basic"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""
export DATABRICKS_USERNAME=username
export DATABRICKS_PASSWORD=password
$CLI current-user me

View File

@ -0,0 +1,4 @@
# "username:password" in base64 is dXNlcm5hbWU6cGFzc3dvcmQ=, expect to see this in Authorization header
[[Repls]]
Old = "dXNlcm5hbWU6cGFzc3dvcmQ="
New = "[ENCODED_AUTH]"

View File

@ -0,0 +1,34 @@
{
"headers": {
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
]
},
"method": "GET",
"path": "/oidc/.well-known/oauth-authorization-server"
}
{
"headers": {
"Authorization": [
"Basic [ENCODED_AUTH]"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
]
},
"method": "POST",
"path": "/oidc/v1/token",
"raw_body": "grant_type=client_credentials\u0026scope=all-apis"
}
{
"headers": {
"Authorization": [
"Bearer oauth-token"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/oauth-m2m"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""
export DATABRICKS_CLIENT_ID=client_id
export DATABRICKS_CLIENT_SECRET=client_secret
$CLI current-user me

View File

@ -0,0 +1,5 @@
# "client_id:client_secret" in base64 is Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=, expect to
# see this in Authorization header
[[Repls]]
Old = "Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="
New = "[ENCODED_AUTH]"
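The pinned constants in this file and in the basic-auth test above are plain HTTP basic-auth encodings: the "Authorization: Basic" header carries base64("user:password"). A quick self-contained check of both strings:

package main

import (
	"encoding/base64"
	"fmt"
)

func main() {
	// Matches the [[Repls]] entry in the basic-auth test:
	fmt.Println(base64.StdEncoding.EncodeToString([]byte("username:password")))
	// dXNlcm5hbWU6cGFzc3dvcmQ=

	// Matches the [[Repls]] entry above:
	fmt.Println(base64.StdEncoding.EncodeToString([]byte("client_id:client_secret")))
	// Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=
}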

View File

@ -0,0 +1,12 @@
{
"headers": {
"Authorization": [
"Bearer dapi1234"
],
"User-Agent": [
"cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/pat"
]
},
"method": "GET",
"path": "/api/2.0/preview/scim/v2/Me"
}

View File

@ -0,0 +1,4 @@
{
"id":"[USERID]",
"userName":"[USERNAME]"
}

View File

@ -0,0 +1,3 @@
export DATABRICKS_TOKEN=dapi1234
$CLI current-user me

View File

@ -0,0 +1,18 @@
RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]
[[Repls]]
Old = '(linux|darwin|windows)'
New = '[OS]'
[[Repls]]
Old = " upstream/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " upstream-version/[A-Za-z0-9.-]+"
New = ""
[[Repls]]
Old = " cicd/[A-Za-z0-9.-]+"
New = ""

acceptance/bin/callserver.py Executable file
View File

@ -0,0 +1,31 @@
#!/usr/bin/env python3
import sys
import os
import json
import urllib.request
from urllib.parse import urlencode
env = {}
for key, value in os.environ.items():
if len(value) > 10_000:
sys.stderr.write(f"Dropping key={key} value len={len(value)}\n")
continue
env[key] = value
q = {
"args": " ".join(sys.argv[1:]),
"cwd": os.getcwd(),
"env": json.dumps(env),
}
url = os.environ["CMD_SERVER_URL"] + "/?" + urlencode(q)
if len(url) > 100_000:
sys.exit("url too large")
resp = urllib.request.urlopen(url)
assert resp.status == 200, (resp.status, resp.url, resp.headers)
result = json.load(resp)
sys.stderr.write(result["stderr"])
sys.stdout.write(result["stdout"])
exitcode = int(result["exitcode"])
sys.exit(exitcode)
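callserver.py defines only the client half of the protocol: it ships args, cwd, and a JSON-encoded env as query parameters and expects a JSON object with stdout, stderr, and exitcode in return. A hedged Go sketch of a server satisfying this wire format; the harness's real command server is internal and certainly differs in detail:

package main

import (
	"encoding/json"
	"net/http"
	"os/exec"
	"strings"
)

// Illustrative counterpart to callserver.py, mirroring only the wire
// format visible in the script: args/cwd arrive as query parameters and
// the response is a JSON object with stdout, stderr and exitcode.
func handler(w http.ResponseWriter, r *http.Request) {
	q := r.URL.Query()
	parts := strings.Fields(q.Get("args"))
	if len(parts) == 0 {
		http.Error(w, "missing args", http.StatusBadRequest)
		return
	}
	cmd := exec.Command(parts[0], parts[1:]...)
	cmd.Dir = q.Get("cwd")
	var stdout, stderr strings.Builder
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr
	exitcode := 0
	if err := cmd.Run(); err != nil {
		if ee, ok := err.(*exec.ExitError); ok {
			exitcode = ee.ExitCode()
		} else {
			exitcode = 1
			stderr.WriteString(err.Error())
		}
	}
	json.NewEncoder(w).Encode(map[string]any{
		"stdout":   stdout.String(),
		"stderr":   stderr.String(),
		"exitcode": exitcode,
	})
}

func main() {
	http.Handle("/", http.HandlerFunc(handler))
	_ = http.ListenAndServe("127.0.0.1:7777", nil)
}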

acceptance/bin/diff.py Executable file
View File

@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""This script implements "diff -r -U2 dir1 dir2" but applies replacements first"""
import sys
import difflib
import json
import re
from pathlib import Path
def replaceAll(patterns, s):
for comp, new in patterns:
s = comp.sub(new, s)
return s
def main():
d1, d2 = sys.argv[1:]
d1, d2 = Path(d1), Path(d2)
with open("repls.json") as f:
repls = json.load(f)
patterns = []
for r in repls:
try:
c = re.compile(r["Old"])
patterns.append((c, r["New"]))
except re.error as e:
print(f"Regex error for pattern {r}: {e}", file=sys.stderr)
files1 = [str(p.relative_to(d1)) for p in d1.rglob("*") if p.is_file()]
files2 = [str(p.relative_to(d2)) for p in d2.rglob("*") if p.is_file()]
set1 = set(files1)
set2 = set(files2)
for f in sorted(set1 | set2):
p1 = d1 / f
p2 = d2 / f
if f not in set2:
print(f"Only in {d1}: {f}")
elif f not in set1:
print(f"Only in {d2}: {f}")
else:
a = replaceAll(patterns, p1.read_text()).splitlines(True)
b = replaceAll(patterns, p2.read_text()).splitlines(True)
if a != b:
p1_str = p1.as_posix()
p2_str = p2.as_posix()
for line in difflib.unified_diff(a, b, p1_str, p2_str, "", "", 2):
print(line, end="")
if __name__ == "__main__":
main()

View File

@ -4,6 +4,7 @@ Helper to sort blocks in text file. A block is a set of lines separated from oth
This is to work around non-determinism in the output.
"""
import sys

blocks = []

@ -11,10 +12,10 @@ blocks = []
for line in sys.stdin:
    if not line.strip():
        if blocks and blocks[-1]:
-            blocks.append('')
+            blocks.append("")
        continue
    if not blocks:
-        blocks.append('')
+        blocks.append("")
    blocks[-1] += line

blocks.sort()

acceptance/bin/sort_lines.py Executable file
View File

@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in text file. Similar to 'sort' but no dependence on locale or presence of 'sort' in PATH.
"""
import sys
lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))

View File

@ -1 +0,0 @@
databricks

View File

@ -0,0 +1,2 @@
bundle:
name: debug

View File

@ -0,0 +1,15 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true

View File

@ -0,0 +1,88 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Info: Phase: load pid=12345
10:07:59 Debug: Apply pid=12345 mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=scripts.preinit
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=ProcessRootIncludes
10:07:59 Debug: Apply pid=12345 mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Info: Phase: initialize pid=12345
10:07:59 Debug: Apply pid=12345 mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
< "id": "[USERID]",
< "userName": "[USERNAME]"
< } pid=12345 mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
< {
< "message": "Workspace path not found"
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
< "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
10:07:59 Info: completed execution pid=12345 exit_code=0

View File

@ -0,0 +1,7 @@
Name: debug
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/debug/default
Validation OK!

View File

@ -0,0 +1,4 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt

View File

@ -0,0 +1,18 @@
Cloud = false
[[Repls]]
# The keys are unsorted and also vary per OS
Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '
New = 'Environment variables for Terraform: ...redacted... '
[[Repls]]
Old = 'pid=[0-9]+'
New = 'pid=12345'
[[Repls]]
Old = '\d\d:\d\d:\d\d'
New = '10:07:59'
[[Repls]]
Old = '\\'
New = '/'

View File

@ -0,0 +1,2 @@
bundle:
name: git_job

View File

@ -0,0 +1,17 @@
resources:
jobs:
out:
name: gitjob
tasks:
- task_key: test_task
notebook_task:
notebook_path: some/test/notebook.py
- task_key: test_task_2
notebook_task:
notebook_path: /Workspace/Users/foo@bar.com/some/test/notebook.py
source: WORKSPACE
git_source:
git_branch: main
git_commit: abcdef
git_provider: github
git_url: https://git.databricks.com

View File

@ -0,0 +1,2 @@
Job is using Git source, skipping downloading files
Job configuration successfully saved to out.job.yml

View File

@ -0,0 +1 @@
$CLI bundle generate job --existing-job-id 1234 --config-dir . --key out

View File

@ -0,0 +1,33 @@
Cloud = false # This test needs to run against stubbed Databricks API
[[Server]]
Pattern = "GET /api/2.1/jobs/get"
Response.Body = '''
{
"job_id": 11223344,
"settings": {
"name": "gitjob",
"git_source": {
"git_url": "https://git.databricks.com",
"git_provider": "github",
"git_branch": "main",
"git_commit": "abcdef"
},
"tasks": [
{
"task_key": "test_task",
"notebook_task": {
"notebook_path": "some/test/notebook.py"
}
},
{
"task_key": "test_task_2",
"notebook_task": {
"source": "WORKSPACE",
"notebook_path": "/Workspace/Users/foo@bar.com/some/test/notebook.py"
}
}
]
}
}
'''
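The [[Server]] stanza above is what lets this test run with Cloud = false: the stubbed pattern answers with the canned body instead of a real workspace. Functionally it reduces to a test HTTP server with one route; a minimal sketch using net/http/httptest (illustrative only; method-qualified mux patterns require Go 1.22+):

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

func main() {
	// One canned route, standing in for the [[Server]] stanza above.
	mux := http.NewServeMux()
	mux.HandleFunc("GET /api/2.1/jobs/get", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, `{"job_id": 11223344, "settings": {"name": "gitjob"}}`)
	})
	srv := httptest.NewServer(mux)
	defer srv.Close()

	resp, err := http.Get(srv.URL + "/api/2.1/jobs/get")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}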

View File

@ -0,0 +1,2 @@
bundle:
name: git-permerror

View File

@ -0,0 +1,81 @@
=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git
>>> [CLI] bundle validate
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Name: git-permerror
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default
Found 1 error
Exit code: 1
>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git/HEAD
>>> [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
"bundle_root_path": "."
}
=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.
>>> chmod 000 .git/config
>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}
>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied
Exit code: 1
{
"bundle_root_path": "."
}

View File

@ -0,0 +1,26 @@
mkdir myrepo
cd myrepo
cp ../databricks.yml .
git-repo-init
mkdir -p subdir/a/b
printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
trace chmod 000 .git
errcode trace $CLI bundle validate
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
chmod 700 .git
trace chmod 000 .git/HEAD
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
chmod 666 .git/HEAD
trace chmod 000 .git/config
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
cd ..
rm -fr myrepo

View File

@ -0,0 +1,5 @@
Badness = "inferred flag is set to true incorrectly; bundle_root_path is not correct; Warn and Error say the same thing; Warn goes to stderr, Error goes to stdout (for backward compat); warning about permissions is repeated twice"
[GOOS]
# This test relies on chmod which does not work on Windows
windows = false

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle deploy --help
Deploy bundle
Usage:
databricks bundle deploy [flags]
Flags:
--auto-approve Skip interactive approvals that might be required for deployment.
-c, --cluster-id string Override cluster in the deployment with the given cluster ID.
--fail-on-active-runs Fail if there are running jobs or pipelines in the deployment.
--force Force-override Git branch validation.
--force-lock Force acquisition of deployment lock.
-h, --help help for deploy
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle deploy --help

View File

@ -0,0 +1,22 @@
>>> [CLI] bundle deployment --help
Deployment related commands
Usage:
databricks bundle deployment [command]
Available Commands:
bind Bind bundle-defined resources to existing resources
unbind Unbind bundle-defined resources from its managed remote resource
Flags:
-h, --help help for deployment
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Use "databricks bundle deployment [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle deployment --help

View File

@ -0,0 +1,18 @@
>>> [CLI] bundle destroy --help
Destroy deployed bundle resources
Usage:
databricks bundle destroy [flags]
Flags:
--auto-approve Skip interactive approvals for deleting resources and files
--force-lock Force acquisition of deployment lock.
-h, --help help for destroy
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle destroy --help

View File

@ -0,0 +1,24 @@
>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard
Usage:
databricks bundle generate dashboard [flags]
Flags:
-s, --dashboard-dir string directory to write the dashboard representation to (default "src")
--existing-id string ID of the dashboard to generate configuration for
--existing-path string workspace path of the dashboard to generate configuration for
-f, --force force overwrite existing files in the output directory
-h, --help help for dashboard
--resource string resource key of dashboard to watch for changes
-d, --resource-dir string directory to write the configuration to (default "resources")
--watch watch for changes to the dashboard and update the configuration
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate dashboard --help

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle generate job --help
Generate bundle configuration for a job
Usage:
databricks bundle generate job [flags]
Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-job-id int Job ID of the job to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for job
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate job --help

View File

@ -0,0 +1,21 @@
>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline
Usage:
databricks bundle generate pipeline [flags]
Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-pipeline-id string ID of the pipeline to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for pipeline
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")
Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle generate pipeline --help

View File

@ -0,0 +1,25 @@
>>> [CLI] bundle generate --help
Generate bundle configuration
Usage:
databricks bundle generate [command]
Available Commands:
app Generate bundle configuration for a Databricks app
dashboard Generate configuration for a dashboard
job Generate bundle configuration for a job
pipeline Generate bundle configuration for a pipeline
Flags:
-h, --help help for generate
--key string resource key to use for the generated configuration
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Use "databricks bundle generate [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle generate --help

View File

@ -0,0 +1,31 @@
>>> [CLI] bundle init --help
Initialize using a bundle template.
TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:
- default-python: The default Python template for Notebooks / Delta Live Tables / Workflows
- default-sql: The default SQL template for .sql files that run with Databricks SQL
- dbt-sql: The dbt SQL template (databricks.com/blog/delivering-cost-effective-data-real-time-dbt-and-databricks)
- mlops-stacks: The Databricks MLOps Stacks template (github.com/databricks/mlops-stacks)
- a local file system path with a template directory
- a Git repository URL, e.g. https://github.com/my/repository
See https://docs.databricks.com/en/dev-tools/bundles/templates.html for more information on templates.
Usage:
databricks bundle init [TEMPLATE_PATH] [flags]
Flags:
--branch string Git branch to use for template initialization
--config-file string JSON file containing key value pairs of input parameters required for template initialization.
-h, --help help for init
--output-dir string Directory to write the initialized template to.
--tag string Git tag to use for template initialization
--template-dir string Directory path within a Git repository containing the template.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle init --help

View File

@ -0,0 +1,17 @@
>>> [CLI] bundle open --help
Open a resource in the browser
Usage:
databricks bundle open [flags]
Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for open
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle open --help

View File

@ -0,0 +1,57 @@
>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.
The KEY is the unique identifier of the resource to run. In addition to
customizing the run using any of the available flags, you can also specify
keyword or positional arguments as shown in these examples:
databricks bundle run my_job -- --key1 value1 --key2 value2
Or:
databricks bundle run my_job -- value1 value2 value3
If the specified job uses job parameters or the job has a notebook task with
parameters, the first example applies and flag names are mapped to the
parameter names.
If the specified job does not use job parameters and the job has a Python file
task or a Python wheel task, the second example applies.
Usage:
databricks bundle run [flags] KEY
Job Flags:
--params stringToString comma separated k=v pairs for job parameters (default [])
Job Task Flags:
Note: please prefer use of job-level parameters (--param) over task-level parameters.
For more information, see https://docs.databricks.com/en/workflows/jobs/create-run-jobs.html#pass-parameters-to-a-databricks-job-task
--dbt-commands strings A list of commands to execute for jobs with DBT tasks.
--jar-params strings A list of parameters for jobs with Spark JAR tasks.
--notebook-params stringToString A map from keys to values for jobs with notebook tasks. (default [])
--pipeline-params stringToString A map from keys to values for jobs with pipeline tasks. (default [])
--python-named-params stringToString A map from keys to values for jobs with Python wheel tasks. (default [])
--python-params strings A list of parameters for jobs with Python tasks.
--spark-submit-params strings A list of parameters for jobs with Spark submit tasks.
--sql-params stringToString A map from keys to values for jobs with SQL tasks. (default [])
Pipeline Flags:
--full-refresh strings List of tables to reset and recompute.
--full-refresh-all Perform a full graph reset and recompute.
--refresh strings List of tables to update.
--refresh-all Perform a full graph update.
--validate-only Perform an update to validate graph correctness.
Flags:
-h, --help help for run
--no-wait Don't wait for the run to complete.
--restart Restart the run if it is already running.
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle run --help

View File

@ -0,0 +1,16 @@
>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration
Usage:
databricks bundle schema [flags]
Flags:
-h, --help help for schema
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle schema --help

View File

@ -0,0 +1,17 @@
>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle
Usage:
databricks bundle summary [flags]
Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for summary
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle summary --help

View File

@ -0,0 +1,19 @@
>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace
Usage:
databricks bundle sync [flags]
Flags:
--full perform full synchronization (default is incremental)
-h, --help help for sync
--interval duration file system polling interval (for --watch) (default 1s)
--output type type of the output format
--watch watch local file system for changes
Global Flags:
--debug enable debug logging
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle sync --help

View File

@ -0,0 +1,16 @@
>>> [CLI] bundle validate --help
Validate configuration
Usage:
databricks bundle validate [flags]
Flags:
-h, --help help for validate
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

View File

@ -0,0 +1 @@
trace $CLI bundle validate --help

View File

@ -0,0 +1,33 @@
>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.
Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html
Usage:
databricks bundle [command]
Available Commands:
deploy Deploy bundle
deployment Deployment related commands
destroy Destroy deployed bundle resources
generate Generate bundle configuration
init Initialize using a bundle template
open Open a resource in the browser
run Run a job or pipeline update
schema Generate JSON Schema for bundle configuration
summary Summarize resources deployed by this bundle
sync Synchronize bundle tree to the workspace
validate Validate configuration
Flags:
-h, --help help for bundle
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
Use "databricks bundle [command] --help" for more information about a command.

View File

@ -0,0 +1 @@
trace $CLI bundle --help

View File

@ -0,0 +1 @@
Cloud = false

View File

@ -0,0 +1,6 @@
bundle:
name: non_yaml_in_includes
include:
- test.py
- resources/*.yml

View File

@ -0,0 +1,10 @@
Error: Files in the 'include' configuration section must be YAML or JSON files.
in databricks.yml:5:4
The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
Name: non_yaml_in_includes
Found 1 error
Exit code: 1

View File

@ -0,0 +1 @@
$CLI bundle validate

View File

@ -0,0 +1 @@
print("Hello world")

View File

@ -0,0 +1 @@
.databricks

View File

@ -0,0 +1,27 @@
bundle:
name: maven
resources:
jobs:
testjob:
name: test-job
tasks:
- task_key: dbt
spark_jar_task:
main_class_name: com.databricks.example.Main
libraries:
- maven:
coordinates: org.jsoup:jsoup:1.7.2
new_cluster:
spark_version: 15.4.x-scala2.12
node_type_id: i3.xlarge
data_security_mode: SINGLE_USER
num_workers: 0
spark_conf:
spark.master: "local[*, 4]"
spark.databricks.cluster.profile: singleNode
custom_tags:
ResourceClass: SingleNode

View File

@ -0,0 +1,7 @@
[
{
"maven": {
"coordinates": "org.jsoup:jsoup:1.7.2"
}
}
]

View File

@ -0,0 +1,15 @@
>>> [CLI] bundle validate -o json
[
{
"maven": {
"coordinates": "org.jsoup:jsoup:1.7.2"
}
}
]
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/maven/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -0,0 +1,4 @@
trace $CLI bundle validate -o json | jq '.resources.jobs.testjob.tasks[0].libraries'
trace $CLI bundle deploy
cat out.requests.txt | jq 'select(.path == "/api/2.1/jobs/create")' | jq '.body.tasks[0].libraries' > out.job.libraries.txt
rm out.requests.txt

View File

@ -0,0 +1,5 @@
# We run this test only locally for now because we need to figure out how to do
# bundle destroy on script.cleanup first.
Cloud = false
RecordRequests = true

View File

@ -0,0 +1 @@
.databricks

View File

@ -0,0 +1,32 @@
bundle:
name: pypi
resources:
jobs:
testjob:
name: test-job
tasks:
- task_key: dbt
dbt_task:
project_directory: ./
profiles_directory: dbt_profiles/
commands:
- 'dbt deps --target=${bundle.target}'
- 'dbt seed --target=${bundle.target} --vars "{ dev_schema: ${workspace.current_user.short_name} }"'
- 'dbt run --target=${bundle.target} --vars "{ dev_schema: ${workspace.current_user.short_name} }"'
libraries:
- pypi:
package: dbt-databricks>=1.8.0,<2.0.0
new_cluster:
spark_version: 15.4.x-scala2.12
node_type_id: i3.xlarge
data_security_mode: SINGLE_USER
num_workers: 0
spark_conf:
spark.master: "local[*, 4]"
spark.databricks.cluster.profile: singleNode
custom_tags:
ResourceClass: SingleNode

View File

@ -0,0 +1,7 @@
[
{
"pypi": {
"package": "dbt-databricks>=1.8.0,<2.0.0"
}
}
]

Some files were not shown because too many files have changed in this diff.