mirror of https://github.com/databricks/cli.git
Merge remote-tracking branch 'origin' into refactor-bundle-init-squashed
This commit is contained in: commit 0f62d0edcf
@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
+779817ed8d63031f5ea761fbd25ee84f38feec0d
@@ -140,9 +140,9 @@ func new{{.PascalName}}() *cobra.Command {
 {{- end}}
 {{$method := .}}
 {{ if not .IsJsonOnly }}
-{{range $request.Fields -}}
+{{range .AllFields -}}
 {{- if not .Required -}}
-{{if .Entity.IsObject }}// TODO: complex arg: {{.Name}}
+{{if .Entity.IsObject}}{{if not (eq . $method.RequestBodyField) }}// TODO: complex arg: {{.Name}}{{end}}
 {{else if .Entity.IsAny }}// TODO: any: {{.Name}}
 {{else if .Entity.ArrayValue }}// TODO: array: {{.Name}}
 {{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}}
@@ -4,3 +4,7 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
@@ -18,7 +18,7 @@ jobs:
       pull-requests: write

     steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
        with:
          stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
          stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.
@@ -31,10 +31,8 @@ jobs:
          exempt-pr-labels: No Autoclose

          # Issue timing
-          days-before-stale: 30
-          days-before-close: 7
+          days-before-stale: 60
+          days-before-close: 30

          repo-token: ${{ secrets.GITHUB_TOKEN }}
          loglevel: DEBUG
-          # TODO: Remove dry-run after merge when confirmed it works correctly
-          dry-run: true
@@ -25,7 +25,7 @@ jobs:
     if: "${{ github.event.pull_request.head.repo.fork }}"

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Delete old comments
        env:
@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -2,15 +2,27 @@ name: publish-winget

 on:
   workflow_dispatch:
+    inputs:
+      tag:
+        description: 'Tag to publish'
+        default: ''

 jobs:
   publish-to-winget-pkgs:
-    runs-on: windows-latest
+    runs-on:
+      group: databricks-protected-runner-group
+      labels: windows-server-latest

     environment: release

     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # https://github.com/vedantmgoyal2009/winget-releaser/releases/tag/v2
+      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
        with:
          identifier: Databricks.DatabricksCLI
          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
          fork-user: eng-dev-ecosystem-bot
+
+          # Use the tag from the input, or the ref name if the input is not provided.
+          # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
+          release-tag: ${{ inputs.tag || github.ref_name }}
@@ -45,20 +45,20 @@ jobs:

     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4

      - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
        with:
          python-version: '3.9'

      - name: Install uv
-        uses: astral-sh/setup-uv@v4
+        uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0

      - name: Set go env
        run: |
@@ -79,8 +79,8 @@ jobs:
     name: lint
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4
      # Use different schema from regular job, to avoid overwriting the same key
@@ -95,9 +95,9 @@ jobs:
          # Exit with status code 1 if there are differences (i.e. unformatted files)
          git diff --exit-code
      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v6
+        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
        with:
-          version: v1.63.1
+          version: v1.63.4
          args: --timeout=15m

  validate-bundle-schema:
@@ -106,10 +106,10 @@ jobs:

     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4
      # Use different schema from regular job, to avoid overwriting the same key
@@ -26,13 +26,13 @@ jobs:

     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4
@@ -48,27 +48,27 @@ jobs:

      - name: Run GoReleaser
        id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
        with:
          version: ~> v2
          args: release --snapshot --skip docker

      - name: Upload macOS binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_darwin_snapshot
          path: |
            dist/*_darwin_*/

      - name: Upload Linux binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_linux_snapshot
          path: |
            dist/*_linux_*/

      - name: Upload Windows binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_windows_snapshot
          path: |
@@ -88,7 +88,7 @@ jobs:
      # Snapshot release may only be updated for commits to the main branch.
      if: github.ref == 'refs/heads/main'

-      uses: softprops/action-gh-release@v1
+      uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
      with:
        name: Snapshot
        prerelease: true
@@ -18,13 +18,13 @@ jobs:

     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4
@@ -37,7 +37,7 @@ jobs:

      # Log into the GitHub Container Registry. The goreleaser action will create
      # the docker images and push them to the GitHub Container Registry.
-      - uses: "docker/login-action@v3"
+      - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
        with:
          registry: "ghcr.io"
          username: "${{ github.actor }}"
@@ -46,11 +46,11 @@ jobs:
      # QEMU is required to build cross platform docker images using buildx.
      # It allows virtualization of the CPU architecture at the application level.
      - name: Set up QEMU dependency
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0

      - name: Run GoReleaser
        id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
        with:
          version: ~> v2
          args: release
@@ -71,7 +71,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update setup-cli
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |
@@ -99,7 +99,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update homebrew-tap
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |
@@ -140,7 +140,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update CLI version in the VSCode extension
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |
@@ -20,6 +20,7 @@ dist/

 *.log
 coverage.txt
+coverage-acceptance.txt

 __pycache__
 *.pyc
@@ -15,6 +15,7 @@ linters:
   - intrange
   - mirror
   - perfsprint
+  - unconvert
 linters-settings:
   govet:
     enable-all: true
@@ -41,6 +42,8 @@ linters-settings:
     disable:
       # good check, but we have too many assert.(No)?Errorf? so excluding for now
       - require-error
+  copyloopvar:
+    check-alias: true
 issues:
   exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
   max-issues-per-linter: 1000

CHANGELOG.md (44 changed lines)
@@ -1,5 +1,49 @@
 # Version changelog

+## [Release] Release v0.239.0
+
+### New feature announcement
+
+#### Databricks Apps support
+
+You can now manage Databricks Apps using DABs by defining an `app` resource in your bundle configuration.
+For more information see Databricks documentation https://docs.databricks.com/en/dev-tools/bundles/resources.html#app
+
+#### Referencing complex variables in complex variables
+
+You can now reference complex variables within other complex variables.
+For more details see https://github.com/databricks/cli/pull/2157
+
+CLI:
+ * Filter out system clusters in cluster picker ([#2131](https://github.com/databricks/cli/pull/2131)).
+ * Add command line flags for fields that are not in the API request body ([#2155](https://github.com/databricks/cli/pull/2155)).
+
+Bundles:
+ * Added support for Databricks Apps in DABs ([#1928](https://github.com/databricks/cli/pull/1928)).
+ * Allow artifact path to be located outside the sync root ([#2128](https://github.com/databricks/cli/pull/2128)).
+ * Retry app deployment if there is an active deployment in progress ([#2153](https://github.com/databricks/cli/pull/2153)).
+ * Resolve variables in a loop ([#2164](https://github.com/databricks/cli/pull/2164)).
+ * Improve resolution of complex variables within complex variables ([#2157](https://github.com/databricks/cli/pull/2157)).
+ * Added output message to warn about slower deployments with apps ([#2161](https://github.com/databricks/cli/pull/2161)).
+ * Patch references to UC schemas to capture dependencies automatically ([#1989](https://github.com/databricks/cli/pull/1989)).
+ * Format default-python template ([#2110](https://github.com/databricks/cli/pull/2110)).
+ * Encourage the use of root_path in production to ensure single deployment ([#1712](https://github.com/databricks/cli/pull/1712)).
+ * Log warnings to stderr for "bundle validate -o json" ([#2109](https://github.com/databricks/cli/pull/2109)).
+
+API Changes:
+ * Changed `databricks account federation-policy update` command with new required argument order.
+ * Changed `databricks account service-principal-federation-policy update` command with new required argument order.
+
+OpenAPI commit 779817ed8d63031f5ea761fbd25ee84f38feec0d (2025-01-08)
+Dependency updates:
+ * Upgrade TF provider to 1.63.0 ([#2162](https://github.com/databricks/cli/pull/2162)).
+ * Bump golangci-lint version to v1.63.4 from v1.63.1 ([#2114](https://github.com/databricks/cli/pull/2114)).
+ * Bump astral-sh/setup-uv from 4 to 5 ([#2116](https://github.com/databricks/cli/pull/2116)).
+ * Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 ([#2080](https://github.com/databricks/cli/pull/2080)).
+ * Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 ([#2079](https://github.com/databricks/cli/pull/2079)).
+ * Bump golang.org/x/term from 0.27.0 to 0.28.0 ([#2078](https://github.com/databricks/cli/pull/2078)).
+ * Bump github.com/databricks/databricks-sdk-go from 0.54.0 to 0.55.0 ([#2126](https://github.com/databricks/cli/pull/2126)).
+
 ## [Release] Release v0.238.0

 Bundles:

Makefile (23 changed lines)
@@ -1,15 +1,21 @@
-default: build
+default: vendor fmt lint

 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .

 GOTESTSUM_FORMAT ?= pkgname-and-test-fails

 lint:
-	./lint.sh ./...
+	golangci-lint run --fix

 lintcheck:
 	golangci-lint run ./...

+# Note 'make lint' will do formatting as well. However, if there are compilation errors,
+# formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
+# If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
+fmt:
+	golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...
+
 test:
 	gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped -- ${PACKAGES}
@@ -19,6 +25,17 @@ cover:
 showcover:
 	go tool cover -html=coverage.txt

+acc-cover:
+	rm -fr ./acceptance/build/cover/
+	CLI_GOCOVERDIR=build/cover go test ./acceptance
+	rm -fr ./acceptance/build/cover-merged/
+	mkdir -p acceptance/build/cover-merged/
+	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/
+	go tool covdata textfmt -i acceptance/build/cover-merged -o coverage-acceptance.txt
+
+acc-showcover:
+	go tool cover -html=coverage-acceptance.txt
+
 build: vendor
 	go build -mod vendor
@@ -39,4 +56,4 @@ integration:
 integration-short:
 	$(INTEGRATION) -short

-.PHONY: lint lintcheck test cover showcover build snapshot vendor schema integration integration-short
+.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
@@ -1,6 +1,7 @@
 package acceptance_test

 import (
+	"context"
 	"errors"
 	"fmt"
 	"io"
@@ -17,6 +18,7 @@ import (
 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/cli/libs/testdiff"
+	"github.com/databricks/databricks-sdk-go"
 	"github.com/stretchr/testify/require"
 )
@@ -35,26 +37,67 @@ var Scripts = map[string]bool{
 }

 func TestAccept(t *testing.T) {
-	execPath := BuildCLI(t)
+	cwd, err := os.Getwd()
+	require.NoError(t, err)
+
+	coverDir := os.Getenv("CLI_GOCOVERDIR")
+
+	if coverDir != "" {
+		require.NoError(t, os.MkdirAll(coverDir, os.ModePerm))
+		coverDir, err = filepath.Abs(coverDir)
+		require.NoError(t, err)
+		t.Logf("Writing coverage to %s", coverDir)
+	}
+
+	execPath := BuildCLI(t, cwd, coverDir)
 	// $CLI is what test scripts are using
 	t.Setenv("CLI", execPath)

-	server := StartServer(t)
-	AddHandlers(server)
-	// Redirect API access to local server:
-	t.Setenv("DATABRICKS_HOST", fmt.Sprintf("http://127.0.0.1:%d", server.Port))
-	t.Setenv("DATABRICKS_TOKEN", "dapi1234")
+	// Make helper scripts available
+	t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))

-	homeDir := t.TempDir()
-	// Do not read user's ~/.databrickscfg
-	t.Setenv(env.HomeEnvVar(), homeDir)
+	repls := testdiff.ReplacementsContext{}
+	repls.Set(execPath, "$CLI")
+
+	tempHomeDir := t.TempDir()
+	repls.Set(tempHomeDir, "$TMPHOME")
+	t.Logf("$TMPHOME=%v", tempHomeDir)
+
+	// Prevent CLI from downloading terraform in each test:
+	t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
+
+	ctx := context.Background()
+	cloudEnv := os.Getenv("CLOUD_ENV")
+
+	if cloudEnv == "" {
+		server := StartServer(t)
+		AddHandlers(server)
+		// Redirect API access to local server:
+		t.Setenv("DATABRICKS_HOST", server.URL)
+		t.Setenv("DATABRICKS_TOKEN", "dapi1234")
+
+		homeDir := t.TempDir()
+		// Do not read user's ~/.databrickscfg
+		t.Setenv(env.HomeEnvVar(), homeDir)
+	}
+
+	workspaceClient, err := databricks.NewWorkspaceClient()
+	require.NoError(t, err)
+
+	user, err := workspaceClient.CurrentUser.Me(ctx)
+	require.NoError(t, err)
+	require.NotNil(t, user)
+	testdiff.PrepareReplacementsUser(t, &repls, *user)
+	testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)

 	testDirs := getTests(t)
 	require.NotEmpty(t, testDirs)
+
 	for _, dir := range testDirs {
-		t.Run(dir, func(t *testing.T) {
+		testName := strings.ReplaceAll(dir, "\\", "/")
+		t.Run(testName, func(t *testing.T) {
 			t.Parallel()
-			runTest(t, dir)
+			runTest(t, dir, coverDir, repls)
 		})
 	}
 }
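The hunk above replaces a single strings.ReplaceAll with a testdiff.ReplacementsContext that accumulates placeholder substitutions ($CLI, $TMPHOME, per-user values) and applies them to every compared output. A minimal sketch of how such a helper could work is shown below; only the Set/Replace usage is taken from the diff, and the implementation is an illustrative assumption, not the actual libs/testdiff code.

package testdiff

import "strings"

// Replacement maps one concrete string (for example a temp path or a user
// name) to a stable placeholder such as "$CLI" or "$TMPHOME".
type Replacement struct {
	From string
	To   string
}

// ReplacementsContext collects replacements and applies them in order, so
// that test outputs compare deterministically across machines and users.
type ReplacementsContext struct {
	Repls []Replacement
}

// Set registers a replacement of from with to.
func (r *ReplacementsContext) Set(from, to string) {
	r.Repls = append(r.Repls, Replacement{From: from, To: to})
}

// Replace applies all registered replacements to s.
func (r *ReplacementsContext) Replace(s string) string {
	for _, repl := range r.Repls {
		s = strings.ReplaceAll(s, repl.From, repl.To)
	}
	return s
}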
@@ -79,7 +122,7 @@ func getTests(t *testing.T) []string {
 	return testDirs
 }

-func runTest(t *testing.T, dir string) {
+func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 	var tmpDir string
 	var err error
 	if KeepTmp {
@@ -102,11 +145,20 @@
 	args := []string{"bash", "-euo", "pipefail", EntryPointScript}
 	cmd := exec.Command(args[0], args[1:]...)
+	if coverDir != "" {
+		// Creating individual coverage directory for each test, because writing to the same one
+		// results in sporadic failures like this one (only if tests are running in parallel):
+		// +error: coverage meta-data emit failed: writing ... rename .../tmp.covmeta.b3f... .../covmeta.b3f2c...: no such file or directory
+		coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
+		err := os.MkdirAll(coverDir, os.ModePerm)
+		require.NoError(t, err)
+		cmd.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
+	}
 	cmd.Dir = tmpDir
 	outB, err := cmd.CombinedOutput()

 	out := formatOutput(string(outB), err)
-	out = strings.ReplaceAll(out, os.Getenv("CLI"), "$CLI")
+	out = repls.Replace(out)
 	doComparison(t, filepath.Join(dir, "output.txt"), "script output", out)

 	for key := range outputs {
@@ -125,7 +177,8 @@ func runTest(t *testing.T, dir string) {
 			continue
 		}
 		pathExpected := filepath.Join(dir, key)
-		doComparison(t, pathExpected, pathNew, string(newValBytes))
+		newVal := repls.Replace(string(newValBytes))
+		doComparison(t, pathExpected, pathNew, newVal)
 	}

 	// Make sure there are not unaccounted for new files
@@ -146,6 +199,7 @@ func runTest(t *testing.T, dir string) {
 		// Show the contents & support overwrite mode for it:
 		pathNew := filepath.Join(tmpDir, name)
 		newVal := testutil.ReadFile(t, pathNew)
+		newVal = repls.Replace(newVal)
 		doComparison(t, filepath.Join(dir, name), filepath.Join(tmpDir, name), newVal)
 	}
 }
@@ -171,6 +225,11 @@ func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {
 // Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
 func readMergedScriptContents(t *testing.T, dir string) string {
 	scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))
+
+	// Wrap script contents in a subshell such that changing the working
+	// directory only affects the main script and not cleanup.
+	scriptContents = "(\n" + scriptContents + ")\n"
+
 	prepares := []string{}
 	cleanups := []string{}
@@ -199,16 +258,30 @@
 	return strings.Join(prepares, "\n")
 }

-func BuildCLI(t *testing.T) string {
-	cwd, err := os.Getwd()
-	require.NoError(t, err)
+func BuildCLI(t *testing.T, cwd, coverDir string) string {
 	execPath := filepath.Join(cwd, "build", "databricks")
 	if runtime.GOOS == "windows" {
 		execPath += ".exe"
 	}

 	start := time.Now()
-	args := []string{"go", "build", "-mod", "vendor", "-o", execPath}
+	args := []string{
+		"go", "build",
+		"-mod", "vendor",
+		"-o", execPath,
+	}
+
+	if coverDir != "" {
+		args = append(args, "-cover")
+	}
+
+	if runtime.GOOS == "windows" {
+		// Get this error on my local Windows:
+		// error obtaining VCS status: exit status 128
+		// Use -buildvcs=false to disable VCS stamping.
+		args = append(args, "-buildvcs=false")
+	}

 	cmd := exec.Command(args[0], args[1:]...)
 	cmd.Dir = ".."
 	out, err := cmd.CombinedOutput()
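The hunks above wire Go's binary coverage support (Go 1.20+) into the acceptance harness: BuildCLI adds -cover to the build when CLI_GOCOVERDIR is set, and runTest gives each child process its own GOCOVERDIR so parallel tests do not race while emitting coverage meta-data. A self-contained sketch of that mechanism, with hypothetical paths and binary name, follows.

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Build an instrumented binary; "-cover" makes it emit coverage counters.
	build := exec.Command("go", "build", "-cover", "-o", "build/mycli", ".")
	build.Stdout, build.Stderr = os.Stdout, os.Stderr
	if err := build.Run(); err != nil {
		panic(err)
	}

	// Give this run a dedicated coverage directory, mirroring the per-test
	// directories the harness creates to avoid concurrent-write failures.
	coverDir := "build/cover/run-1"
	if err := os.MkdirAll(coverDir, os.ModePerm); err != nil {
		panic(err)
	}

	// The instrumented binary writes counter files into $GOCOVERDIR on exit.
	run := exec.Command("./build/mycli", "--help")
	run.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
	out, _ := run.CombinedOutput()
	fmt.Print(string(out))

	// The raw counters can then be merged and converted to a profile, as the
	// Makefile's acc-cover target does with "go tool covdata merge"/"textfmt".
}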
@@ -0,0 +1,21 @@
+#!/usr/bin/env python3
+"""
+Helper to sort blocks in text file. A block is a set of lines separated from others by empty line.
+
+This is to workaround non-determinism in the output.
+"""
+import sys
+
+blocks = []
+
+for line in sys.stdin:
+    if not line.strip():
+        if blocks and blocks[-1]:
+            blocks.append('')
+        continue
+    if not blocks:
+        blocks.append('')
+    blocks[-1] += line
+
+blocks.sort()
+print("\n".join(blocks))
@@ -0,0 +1,6 @@
+{
+  "project_name": "my_dbt_sql",
+  "http_path": "/sql/2.0/warehouses/f00dcafe",
+  "default_catalog": "main",
+  "personal_schemas": "yes, use a schema based on the current user name during development"
+}
@@ -0,0 +1,32 @@
+
+>>> $CLI bundle init dbt-sql --config-file ./input.json
+
+Welcome to the dbt template for Databricks Asset Bundles!
+
+A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
+workspace_host: $DATABRICKS_URL
+
+📊 Your new project has been created in the 'my_dbt_sql' directory!
+If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
+Refer to the README.md file for full "getting started" guide and production setup instructions.
+
+
+>>> $CLI bundle validate -t dev
+Name: my_dbt_sql
+Target: dev
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/dev
+
+Validation OK!
+
+>>> $CLI bundle validate -t prod
+Name: my_dbt_sql
+Target: prod
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_dbt_sql/prod
+
+Validation OK!
@@ -0,0 +1,5 @@
+trace $CLI bundle init dbt-sql --config-file ./input.json
+
+cd my_dbt_sql
+trace $CLI bundle validate -t dev
+trace $CLI bundle validate -t prod
@@ -0,0 +1 @@
+rm -fr my_dbt_sql
@@ -0,0 +1,6 @@
+{
+  "project_name": "my_default_python",
+  "include_notebook": "yes",
+  "include_dlt": "yes",
+  "include_python": "yes"
+}
@@ -0,0 +1,30 @@
+
+>>> $CLI bundle init default-python --config-file ./input.json
+
+Welcome to the default Python template for Databricks Asset Bundles!
+Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): $DATABRICKS_URL
+
+✨ Your new project has been created in the 'my_default_python' directory!
+
+Please refer to the README.md file for "getting started" instructions.
+See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
+
+>>> $CLI bundle validate -t dev
+Name: my_default_python
+Target: dev
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/dev
+
+Validation OK!
+
+>>> $CLI bundle validate -t prod
+Name: my_default_python
+Target: prod
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_default_python/prod
+
+Validation OK!
@@ -0,0 +1,5 @@
+trace $CLI bundle init default-python --config-file ./input.json
+
+cd my_default_python
+trace $CLI bundle validate -t dev
+trace $CLI bundle validate -t prod
@@ -0,0 +1 @@
+rm -fr my_default_python
@@ -0,0 +1,6 @@
+{
+  "project_name": "my_default_sql",
+  "http_path": "/sql/2.0/warehouses/f00dcafe",
+  "default_catalog": "main",
+  "personal_schemas": "yes, automatically use a schema based on the current user name during development"
+}
@@ -0,0 +1,32 @@
+
+>>> $CLI bundle init default-sql --config-file ./input.json
+
+Welcome to the default SQL template for Databricks Asset Bundles!
+
+A workspace was selected based on your current profile. For information about how to change this, see https://docs.databricks.com/dev-tools/cli/profiles.html.
+workspace_host: $DATABRICKS_URL
+
+✨ Your new project has been created in the 'my_default_sql' directory!
+
+Please refer to the README.md file for "getting started" instructions.
+See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
+
+>>> $CLI bundle validate -t dev
+Name: my_default_sql
+Target: dev
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/dev
+
+Validation OK!
+
+>>> $CLI bundle validate -t prod
+Name: my_default_sql
+Target: prod
+Workspace:
+  Host: $DATABRICKS_URL
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/my_default_sql/prod
+
+Validation OK!
@@ -0,0 +1,5 @@
+trace $CLI bundle init default-sql --config-file ./input.json
+
+cd my_default_sql
+trace $CLI bundle validate -t dev
+trace $CLI bundle validate -t prod
@@ -0,0 +1 @@
+rm -fr my_default_sql
@@ -4,7 +4,7 @@
     "foo": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
@@ -32,7 +32,7 @@
     "foo": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
@@ -20,7 +20,6 @@ targets:
       jobs:
         foo:
           job_clusters:
-            # This does not work because merging is done before resolution
             - job_cluster_key: "${var.mykey}"
               new_cluster:
                 node_type_id: i3.xlarge
@@ -4,22 +4,17 @@
     "foo": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
       "job_clusters": [
-        {
-          "job_cluster_key": "key",
-          "new_cluster": {
-            "spark_version": "13.3.x-scala2.12"
-          }
-        },
         {
           "job_cluster_key": "key",
           "new_cluster": {
             "node_type_id": "i3.xlarge",
-            "num_workers": 1
+            "num_workers": 1,
+            "spark_version": "13.3.x-scala2.12"
           }
         }
       ],
@@ -36,8 +31,8 @@
 Name: override_job_cluster
 Target: development
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development

 Validation OK!
@@ -46,22 +41,17 @@ Validation OK!
     "foo": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
       "job_clusters": [
-        {
-          "job_cluster_key": "key",
-          "new_cluster": {
-            "spark_version": "13.3.x-scala2.12"
-          }
-        },
         {
           "job_cluster_key": "key",
           "new_cluster": {
             "node_type_id": "i3.2xlarge",
-            "num_workers": 4
+            "num_workers": 4,
+            "spark_version": "13.3.x-scala2.12"
           }
         }
       ],
@@ -78,7 +68,7 @@ Validation OK!
 Name: override_job_cluster
 Target: staging
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging

 Validation OK!
@@ -0,0 +1,6 @@
+
+>>> errcode $CLI bundle validate -o json -t development
+Error: file ./test1.py not found
+
+
+Exit code: 1
@@ -1,8 +1,3 @@
-
->>> errcode $CLI bundle validate -o json -t development
-Error: file ./test1.py not found
-
-Exit code: 1
 {
   "name": "job",
   "queue": {
@@ -36,6 +31,7 @@ Exit code: 1
 >>> errcode $CLI bundle validate -o json -t staging
 Error: file ./test1.py not found

+
 Exit code: 1
 {
   "name": "job",
@@ -66,3 +62,16 @@ Exit code: 1
 }
 ]
 }
+
+>>> errcode $CLI bundle validate -t staging
+Error: file ./test1.py not found
+
+Name: override_job_tasks
+Target: staging
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging
+
+Found 1 error
+
+Exit code: 1
@ -1,2 +1,3 @@
|
||||||
trace errcode $CLI bundle validate -o json -t development | jq .resources.jobs.foo
|
trace errcode $CLI bundle validate -o json -t development 2> out.development.stderr.txt | jq .resources.jobs.foo
|
||||||
trace errcode $CLI bundle validate -o json -t staging | jq .resources.jobs.foo
|
trace errcode $CLI bundle validate -o json -t staging | jq .resources.jobs.foo
|
||||||
|
trace errcode $CLI bundle validate -t staging
|
||||||
|
|
|
@@ -1,5 +1,9 @@

 >>> $CLI bundle validate -o json -t dev
+Warning: expected map, found string
+  at resources.clusters.my_cluster
+  in databricks.yml:6:17
+
 {
   "clusters": {
     "my_cluster": {
@@ -17,7 +21,7 @@ Warning: expected map, found string
 Name: merge-string-map
 Target: dev
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/merge-string-map/dev
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev

 Found 1 warning
@@ -14,7 +14,7 @@
   ],
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/development/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
   },
   "name": "job",
   "permissions": []
@@ -36,7 +36,7 @@
   ],
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/staging/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
   },
   "name": "job",
   "permissions": []
@@ -0,0 +1,19 @@
+Error: experiment undefined-experiment is not defined
+  at resources.experiments.undefined-experiment
+  in databricks.yml:11:26
+
+Error: job undefined-job is not defined
+  at resources.jobs.undefined-job
+  in databricks.yml:6:19
+
+Error: pipeline undefined-pipeline is not defined
+  at resources.pipelines.undefined-pipeline
+  in databricks.yml:14:24
+
+Found 3 errors
+
+Name: undefined-job
+Target: default
+
+
+Exit code: 1
@@ -0,0 +1,2 @@
+# We need sort_blocks.py because the order of diagnostics is currently randomized
+$CLI bundle validate 2>&1 | sort_blocks.py
@@ -0,0 +1,12 @@
+bundle:
+  name: complex-cross-ref
+
+variables:
+  a:
+    default:
+      a_1: 500
+      a_2: ${var.b.b_2}
+  b:
+    default:
+      b_1: ${var.a.a_1}
+      b_2: 2.5
@@ -0,0 +1,22 @@
+{
+  "a": {
+    "default": {
+      "a_1": 500,
+      "a_2": 2.5
+    },
+    "value": {
+      "a_1": 500,
+      "a_2": 2.5
+    }
+  },
+  "b": {
+    "default": {
+      "b_1": 500,
+      "b_2": 2.5
+    },
+    "value": {
+      "b_1": 500,
+      "b_2": 2.5
+    }
+  }
+}
@@ -0,0 +1 @@
+$CLI bundle validate -o json | jq .variables
@@ -0,0 +1,7 @@
+bundle:
+  name: cycle
+
+variables:
+  a:
+    default:
+      hello: ${var.a}
@@ -0,0 +1,9 @@
+Warning: Detected unresolved variables after 11 resolution rounds
+
+Name: cycle
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
+
+Found 1 warning
@@ -0,0 +1 @@
+$CLI bundle validate
@@ -0,0 +1,10 @@
+bundle:
+  name: cycle
+
+variables:
+  a:
+    default:
+      hello: ${var.b}
+  b:
+    default:
+      hello: ${var.a}
@@ -0,0 +1,9 @@
+Warning: Detected unresolved variables after 11 resolution rounds
+
+Name: cycle
+Target: default
+Workspace:
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/cycle/default
+
+Found 1 warning
@@ -0,0 +1 @@
+$CLI bundle validate
@@ -0,0 +1,27 @@
+# This example works and properly merges resources.jobs.job1.job_clusters.new_cluster and ${var.cluster},
+# retaining num_workers, spark_version and overriding node_type_id.
+bundle:
+  name: TestResolveComplexVariable
+
+variables:
+  cluster:
+    type: "complex"
+    value:
+      node_type_id: "Standard_DS3_v2"
+      num_workers: 2
+
+resources:
+  jobs:
+    job1:
+      job_clusters:
+        - new_cluster:
+            node_type_id: "random"
+            spark_version: 13.3.x-scala2.12
+
+targets:
+  dev:
+    resources:
+      jobs:
+        job1:
+          job_clusters:
+            - new_cluster: ${var.cluster}
@@ -0,0 +1,10 @@
+[
+  {
+    "job_cluster_key": "",
+    "new_cluster": {
+      "node_type_id": "Standard_DS3_v2",
+      "num_workers": 2,
+      "spark_version": "13.3.x-scala2.12"
+    }
+  }
+]
@@ -0,0 +1 @@
+$CLI bundle validate -o json | jq .resources.jobs.job1.job_clusters
@@ -0,0 +1,21 @@
+bundle:
+  name: complex-transitive
+
+variables:
+  catalog:
+    default: hive_metastore
+  spark_conf_1:
+    default:
+      "spark.databricks.sql.initial.catalog.name": ${var.catalog}
+  spark_conf:
+    default: ${var.spark_conf_1}
+  etl_cluster_config:
+    type: complex
+    default:
+      spark_version: 14.3.x-scala2.12
+      runtime_engine: PHOTON
+      spark_conf: ${var.spark_conf}
+
+resources:
+  clusters:
+    my_cluster: ${var.etl_cluster_config}
@@ -0,0 +1,3 @@
+{
+  "spark.databricks.sql.initial.catalog.name": "hive_metastore"
+}
@@ -0,0 +1,2 @@
+# Currently, this incorrectly outputs variable reference instead of resolved value
+$CLI bundle validate -o json | jq '.resources.clusters.my_cluster.spark_conf'
@@ -0,0 +1,22 @@
+bundle:
+  name: complex-transitive-deeper
+
+variables:
+  catalog_1:
+    default:
+      name: hive_metastore
+  catalog:
+    default: ${var.catalog_1}
+  spark_conf:
+    default:
+      "spark.databricks.sql.initial.catalog.name": ${var.catalog.name}
+  etl_cluster_config:
+    type: complex
+    default:
+      spark_version: 14.3.x-scala2.12
+      runtime_engine: PHOTON
+      spark_conf: ${var.spark_conf}
+
+resources:
+  clusters:
+    my_cluster: ${var.etl_cluster_config}
@@ -0,0 +1,7 @@
+Error: expected a map to index "variables.catalog.value.name", found string
+
+{
+  "my_cluster": "${var.etl_cluster_config}"
+}
+
+Exit code: 1
@@ -0,0 +1,2 @@
+# Currently, this errors instead of interpolating variables
+$CLI bundle validate -o json | jq '.resources.clusters'
@@ -1,3 +1,3 @@
 {
-  "spark.databricks.sql.initial.catalog.name": "${var.catalog}"
+  "spark.databricks.sql.initial.catalog.name": "hive_metastore"
 }
@@ -0,0 +1,17 @@
+bundle:
+  name: TestResolveComplexVariableWithVarReference
+
+variables:
+  package_version:
+    default: "1.0.0"
+  cluster_libraries:
+    type: "complex"
+    default:
+      - pypi:
+          package: "cicd_template==${var.package_version}"
+
+resources:
+  jobs:
+    job1:
+      tasks:
+        - libraries: ${var.cluster_libraries}
@@ -0,0 +1,12 @@
+[
+  {
+    "libraries": [
+      {
+        "pypi": {
+          "package": "cicd_template==1.0.0"
+        }
+      }
+    ],
+    "task_key": ""
+  }
+]
@@ -0,0 +1 @@
+$CLI bundle validate -o json | jq .resources.jobs.job1.tasks
@@ -0,0 +1,34 @@
+# Does not work currently, explicitly disabled, even though it works if you remove 'type: "complex"' lines
+# Also fails to merge clusters.
+bundle:
+  name: TestResolveComplexVariableReferencesWithComplexVariablesError
+
+variables:
+  cluster:
+    type: "complex"
+    value:
+      node_type_id: "Standard_DS3_v2"
+      num_workers: 2
+      spark_conf: "${var.spark_conf}"
+  spark_conf:
+    type: "complex"
+    value:
+      spark.executor.memory: "4g"
+      spark.executor.cores: "2"
+
+resources:
+  jobs:
+    job1:
+      job_clusters:
+        - job_cluster_key: my_cluster
+          new_cluster:
+            node_type_id: "random"
+
+targets:
+  dev:
+    resources:
+      jobs:
+        job1:
+          job_clusters:
+            - job_cluster_key: my_cluster
+              new_cluster: ${var.cluster}
@@ -0,0 +1,17 @@
+Warning: unknown field: node_type_id
+  at resources.jobs.job1.job_clusters[0]
+  in databricks.yml:25:11
+
+[
+  {
+    "job_cluster_key": "my_cluster",
+    "new_cluster": {
+      "node_type_id": "Standard_DS3_v2",
+      "num_workers": 2,
+      "spark_conf": {
+        "spark.executor.cores": "2",
+        "spark.executor.memory": "4g"
+      }
+    }
+  }
+]
@@ -0,0 +1 @@
+$CLI bundle validate -o json | jq .resources.jobs.job1.job_clusters
@@ -4,7 +4,7 @@
     "my_job": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables/default/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/default/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
@@ -4,7 +4,7 @@
     "my_job": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables/dev/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables/dev/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
@@ -4,7 +4,7 @@
     "my_job": {
       "deployment": {
         "kind": "BUNDLE",
-        "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
+        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/complex-variables-multiple-files/dev/state/metadata.json"
       },
       "edit_mode": "UI_LOCKED",
       "format": "MULTI_TASK",
@@ -0,0 +1,8 @@
+bundle:
+  name: cycle
+
+variables:
+  a:
+    default: ${var.b}
+  b:
+    default: ${var.a}
@@ -0,0 +1,14 @@
+Error: cycle detected in field resolution: variables.a.default -> var.b -> var.a -> var.b
+
+{
+  "a": {
+    "default": "${var.b}",
+    "value": "${var.b}"
+  },
+  "b": {
+    "default": "${var.a}",
+    "value": "${var.a}"
+  }
+}
+
+Exit code: 1
@@ -0,0 +1 @@
+$CLI bundle validate -o json | jq .variables
@@ -3,8 +3,8 @@ Error: no value assigned to required variable a. Assignment can be done through
 Name: empty${var.a}
 Target: default
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/empty${var.a}/default
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/empty${var.a}/default

 Found 1 error
@@ -14,8 +14,8 @@ Error: no value assigned to required variable b. Assignment can be done through
 Name: test bundle
 Target: env-missing-a-required-variable-assignment
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/test bundle/env-missing-a-required-variable-assignment
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/test bundle/env-missing-a-required-variable-assignment

 Found 1 error
@@ -0,0 +1,19 @@
bundle:
  name: git
  git:
    # This is currently not supported
    branch: ${var.deployment_branch}

variables:
  deployment_branch:
    # By setting deployment_branch to "" we set bundle.git.branch to "", which is the same as unsetting it.
    # This should make the CLI read the branch from git and update bundle.git.branch accordingly. It should
    # also set bundle.git.inferred to true.
    default: ""

targets:
  prod:
    default: true
  dev:
    variables:
      deployment_branch: dev-branch
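The comments above describe the intended fallback: an empty branch is re-read from the repository and marked as inferred. A standalone sketch of that behavior, assuming a hypothetical `gitConfig` struct and shelling out to `git branch --show-current` (the real CLI uses its own git libraries, not `os/exec`):

```go
package main

import (
	"fmt"
	"os/exec"
	"strings"
)

type gitConfig struct {
	Branch   string
	Inferred bool
}

// resolveBranch keeps an explicitly configured branch as-is; when the
// configured value is empty it asks git for the current branch and marks
// the result as inferred.
func resolveBranch(cfg *gitConfig) error {
	if cfg.Branch != "" {
		return nil
	}
	out, err := exec.Command("git", "branch", "--show-current").Output()
	if err != nil {
		return err
	}
	cfg.Branch = strings.TrimSpace(string(out))
	cfg.Inferred = true
	return nil
}

func main() {
	cfg := &gitConfig{Branch: ""} // e.g. ${var.deployment_branch} resolved to ""
	if err := resolveBranch(cfg); err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("branch=%q inferred=%v\n", cfg.Branch, cfg.Inferred)
}
```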
@@ -0,0 +1,98 @@

>>> $CLI bundle validate -o json
{
  "bundle": {
    "environment": "prod",
    "git": {
      "actual_branch": "main",
      "branch": "",
      "bundle_root_path": ".",
    },
    "name": "git",
    "target": "prod",
    "terraform": {
      "exec_path": "$TMPHOME"
    }
  },
  "sync": {
    "paths": [
      "."
    ]
  },
  "targets": null,
  "variables": {
    "deployment_branch": {
      "default": "",
      "value": ""
    }
  },
  "workspace": {
    "artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/artifacts",
    "current_user": {
      "short_name": "$USERNAME",
      "userName": "$USERNAME"
    },
    "file_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/files",
    "resource_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/resources",
    "root_path": "/Workspace/Users/$USERNAME/.bundle/git/prod",
    "state_path": "/Workspace/Users/$USERNAME/.bundle/git/prod/state"
  }
}

>>> $CLI bundle validate
Name: git
Target: prod
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/git/prod

Validation OK!

>>> $CLI bundle validate -o json -t dev
{
  "bundle": {
    "environment": "dev",
    "git": {
      "actual_branch": "main",
      "branch": "dev-branch",
      "bundle_root_path": ".",
    },
    "name": "git",
    "target": "dev",
    "terraform": {
      "exec_path": "$TMPHOME"
    }
  },
  "sync": {
    "paths": [
      "."
    ]
  },
  "targets": null,
  "variables": {
    "deployment_branch": {
      "default": "dev-branch",
      "value": "dev-branch"
    }
  },
  "workspace": {
    "artifact_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/artifacts",
    "current_user": {
      "short_name": "$USERNAME",
      "userName": "$USERNAME"
    },
    "file_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/files",
    "resource_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/resources",
    "root_path": "/Workspace/Users/$USERNAME/.bundle/git/dev",
    "state_path": "/Workspace/Users/$USERNAME/.bundle/git/dev/state"
  }
}

>>> $CLI bundle validate -t dev
Name: git
Target: dev
Workspace:
  User: $USERNAME
  Path: /Workspace/Users/$USERNAME/.bundle/git/dev

Validation OK!
@@ -0,0 +1,6 @@
git-repo-init
trace $CLI bundle validate -o json | grep -v '"commit"'
trace $CLI bundle validate
trace $CLI bundle validate -o json -t dev | grep -v '"commit"'
trace $CLI bundle validate -t dev | grep -v '"commit"'
rm -fr .git
@@ -0,0 +1,10 @@
bundle:
  name: host

variables:
  host:
    default: https://nonexistent123.staging.cloud.databricks.com

workspace:
  # This is currently not supported
  host: ${var.host}
@@ -0,0 +1,38 @@

>>> errcode $CLI bundle validate -o json
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

{
  "bundle": {
    "environment": "default",
    "name": "host",
    "target": "default"
  },
  "sync": {
    "paths": [
      "."
    ]
  },
  "targets": null,
  "variables": {
    "host": {
      "default": "https://nonexistent123.staging.cloud.databricks.com"
    }
  },
  "workspace": {
    "host": "${var.host}"
  }
}
Exit code: 1

>>> errcode $CLI bundle validate
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

Name: host
Target: default
Workspace:
  Host: ${var.host}

Found 1 error

Exit code: 1
@@ -0,0 +1,2 @@
trace errcode $CLI bundle validate -o json
trace errcode $CLI bundle validate
@@ -1,8 +1,8 @@
 {
   "artifact_path": "TestResolveVariableReferences/bar/artifacts",
   "current_user": {
-    "short_name": "tester",
-    "userName": "tester@databricks.com"
+    "short_name": "$USERNAME",
+    "userName": "$USERNAME"
   },
   "file_path": "TestResolveVariableReferences/bar/baz",
   "resource_path": "TestResolveVariableReferences/bar/resources",
@@ -0,0 +1,23 @@
bundle:
  name: TestResolveVariableReferencesForPrimitiveNonStringFields

variables:
  no_alert_for_canceled_runs: {}
  no_alert_for_skipped_runs: {}
  min_workers: {}
  max_workers: {}
  spot_bid_max_price: {}

resources:
  jobs:
    job1:
      notification_settings:
        no_alert_for_canceled_runs: ${var.no_alert_for_canceled_runs}
        no_alert_for_skipped_runs: ${var.no_alert_for_skipped_runs}
      tasks:
        - new_cluster:
            autoscale:
              min_workers: ${var.min_workers}
              max_workers: ${var.max_workers}
            azure_attributes:
              spot_bid_max_price: ${var.spot_bid_max_price}
@@ -0,0 +1,52 @@
{
  "variables": {
    "max_workers": {
      "value": "2"
    },
    "min_workers": {
      "value": "1"
    },
    "no_alert_for_canceled_runs": {
      "value": "true"
    },
    "no_alert_for_skipped_runs": {
      "value": "false"
    },
    "spot_bid_max_price": {
      "value": "0.5"
    }
  },
  "jobs": {
    "job1": {
      "deployment": {
        "kind": "BUNDLE",
        "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/TestResolveVariableReferencesForPrimitiveNonStringFields/default/state/metadata.json"
      },
      "edit_mode": "UI_LOCKED",
      "format": "MULTI_TASK",
      "notification_settings": {
        "no_alert_for_canceled_runs": true,
        "no_alert_for_skipped_runs": false
      },
      "permissions": [],
      "queue": {
        "enabled": true
      },
      "tags": {},
      "tasks": [
        {
          "new_cluster": {
            "autoscale": {
              "max_workers": 2,
              "min_workers": 1
            },
            "azure_attributes": {
              "spot_bid_max_price": 0.5
            }
          },
          "task_key": ""
        }
      ]
    }
  }
}
@@ -0,0 +1,4 @@
export BUNDLE_VAR_no_alert_for_skipped_runs=false
export BUNDLE_VAR_max_workers=2
export BUNDLE_VAR_min_workers=3 # shadowed by --var below
$CLI bundle validate -o json --var no_alert_for_canceled_runs=true --var min_workers=1 --var spot_bid_max_price=0.5 | jq '{ variables, jobs: .resources.jobs }'
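The `min_workers` export above is deliberately shadowed: a `--var` flag wins over a `BUNDLE_VAR_*` environment variable, which in turn wins over the variable's default. A minimal standalone sketch of that lookup order (the function name and shape are illustrative, not the CLI's internals):

```go
package main

import (
	"fmt"
	"os"
)

// resolveVar returns a variable's value using the precedence the test
// relies on: --var flag, then BUNDLE_VAR_<name> env var, then default.
func resolveVar(name string, flags map[string]string, def string) string {
	if v, ok := flags[name]; ok {
		return v // highest precedence: --var name=value
	}
	if v, ok := os.LookupEnv("BUNDLE_VAR_" + name); ok {
		return v // next: environment variable
	}
	return def // lowest: default from databricks.yml
}

func main() {
	os.Setenv("BUNDLE_VAR_min_workers", "3")
	flags := map[string]string{"min_workers": "1"}    // parsed from --var
	fmt.Println(resolveVar("min_workers", flags, "0")) // prints 1, not 3
}
```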
@@ -0,0 +1,9 @@
bundle:
  name: TestResolveVariableReferencesToBundleVariables

workspace:
  root_path: "${bundle.name}/${var.foo}"

variables:
  foo:
    value: "bar"
@@ -0,0 +1,11 @@
{
  "artifact_path": "TestResolveVariableReferencesToBundleVariables/bar/artifacts",
  "current_user": {
    "short_name": "$USERNAME",
    "userName": "$USERNAME"
  },
  "file_path": "TestResolveVariableReferencesToBundleVariables/bar/files",
  "resource_path": "TestResolveVariableReferencesToBundleVariables/bar/resources",
  "root_path": "TestResolveVariableReferencesToBundleVariables/bar",
  "state_path": "TestResolveVariableReferencesToBundleVariables/bar/state"
}
@@ -0,0 +1 @@
$CLI bundle validate -o json | jq .workspace
@@ -8,8 +8,8 @@ Error: no value assigned to required variable b. Assignment can be done through
 Name: ${var.a} ${var.b}
 Target: default
 Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/${var.a} ${var.b}/default
+  User: $USERNAME
+  Path: /Workspace/Users/$USERNAME/.bundle/${var.a} ${var.b}/default

 Found 1 error
@@ -12,7 +12,7 @@
   "continuous": true,
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/use-default-variable-values/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/use-default-variable-values/state/metadata.json"
   },
   "name": "a_string",
   "permissions": []
@@ -33,7 +33,7 @@
   "continuous": true,
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-string-variable/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-string-variable/state/metadata.json"
   },
   "name": "overridden_string",
   "permissions": []
@@ -54,7 +54,7 @@
   "continuous": true,
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-int-variable/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-int-variable/state/metadata.json"
   },
   "name": "a_string",
   "permissions": []
@@ -75,7 +75,7 @@
   "continuous": false,
   "deployment": {
     "kind": "BUNDLE",
-    "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
+    "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/foobar/override-both-bool-and-string-variables/state/metadata.json"
   },
   "name": "overridden_string",
   "permissions": []
@@ -1,6 +1,3 @@
-# Prevent CLI from downloading terraform in each test:
-export DATABRICKS_TF_EXEC_PATH=/tmp/
-
 errcode() {
   # Temporarily disable 'set -e' to prevent the script from exiting on error
   set +e
@@ -34,3 +31,12 @@ trace() {

   return $?
 }
+
+git-repo-init() {
+  git init -qb main
+  git config --global core.autocrlf false
+  git config user.name "Tester"
+  git config user.email "tester@databricks.com"
+  git add databricks.yml
+  git commit -qm 'Add databricks.yml'
+}
@@ -2,11 +2,11 @@ package acceptance_test

 import (
 	"encoding/json"
-	"net"
 	"net/http"
 	"net/http/httptest"
 	"testing"

+	"github.com/databricks/databricks-sdk-go/service/catalog"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/iam"
 	"github.com/databricks/databricks-sdk-go/service/workspace"
@@ -14,8 +14,7 @@ import (

 type TestServer struct {
 	*httptest.Server
 	Mux *http.ServeMux
-	Port int
 }

 type HandlerFunc func(r *http.Request) (any, error)
@@ -23,12 +22,10 @@ type HandlerFunc func(r *http.Request) (any, error)

 func NewTestServer() *TestServer {
 	mux := http.NewServeMux()
 	server := httptest.NewServer(mux)
-	port := server.Listener.Addr().(*net.TCPAddr).Port

 	return &TestServer{
 		Server: server,
 		Mux:    mux,
-		Port:   port,
 	}
 }
@@ -126,4 +123,27 @@ func AddHandlers(server *TestServer) {
 			ResourceId: "1001",
 		}, nil
 	})
+
+	server.Handle("/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
+		return catalog.MetastoreAssignment{
+			DefaultCatalogName: "main",
+		}, nil
+	})
+
+	server.Handle("/api/2.0/permissions/directories/1001", func(r *http.Request) (any, error) {
+		return workspace.WorkspaceObjectPermissions{
+			ObjectId:   "1001",
+			ObjectType: "DIRECTORY",
+			AccessControlList: []workspace.WorkspaceObjectAccessControlResponse{
+				{
+					UserName: "tester@databricks.com",
+					AllPermissions: []workspace.WorkspaceObjectPermission{
+						{
+							PermissionLevel: "CAN_MANAGE",
+						},
+					},
+				},
+			},
+		}, nil
+	})
 }
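The handlers above follow one pattern: each route returns a value that the test server serializes to JSON. A self-contained sketch of the same pattern using only the standard library (the `handle` helper is illustrative; the real `TestServer` lives in the acceptance test package):

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
)

// handle registers a route whose return value is encoded as JSON,
// mirroring the HandlerFunc pattern used by the test server.
func handle(mux *http.ServeMux, pattern string, fn func(r *http.Request) (any, error)) {
	mux.HandleFunc(pattern, func(w http.ResponseWriter, r *http.Request) {
		v, err := fn(r)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(v)
	})
}

func main() {
	mux := http.NewServeMux()
	handle(mux, "/api/2.1/unity-catalog/current-metastore-assignment", func(r *http.Request) (any, error) {
		return map[string]string{"default_catalog_name": "main"}, nil
	})

	srv := httptest.NewServer(mux)
	defer srv.Close()

	resp, _ := http.Get(srv.URL + "/api/2.1/unity-catalog/current-metastore-assignment")
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // {"default_catalog_name":"main"}
}
```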
@@ -0,0 +1,50 @@
package apps

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynvar"
)

type interpolateVariables struct{}

func (i *interpolateVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	pattern := dyn.NewPattern(
		dyn.Key("resources"),
		dyn.Key("apps"),
		dyn.AnyKey(),
		dyn.Key("config"),
	)

	tfToConfigMap := map[string]string{}
	for k, r := range config.SupportedResources() {
		tfToConfigMap[r.TerraformResourceName] = k
	}

	err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) {
		return dyn.MapByPattern(root, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
			return dynvar.Resolve(v, func(path dyn.Path) (dyn.Value, error) {
				key, ok := tfToConfigMap[path[0].Key()]
				if ok {
					path = dyn.NewPath(dyn.Key("resources"), dyn.Key(key)).Append(path[1:]...)
				}

				return dyn.GetByPath(root, path)
			})
		})
	})

	return diag.FromErr(err)
}

func (i *interpolateVariables) Name() string {
	return "apps.InterpolateVariables"
}

func InterpolateVariables() bundle.Mutator {
	return &interpolateVariables{}
}
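The interesting step above is the key rewrite: a reference written against a Terraform resource name (for example `databricks_job.my_job.id`) is mapped back into the bundle's config tree (`resources.jobs.my_job.id`) before lookup. A standalone sketch of just that rewrite on plain string paths (the mapping table here is a hand-written assumption, standing in for `config.SupportedResources()`):

```go
package main

import (
	"fmt"
	"strings"
)

// tfToConfig maps a Terraform resource type to its bundle config section;
// hand-written here for illustration.
var tfToConfig = map[string]string{
	"databricks_job":      "jobs",
	"databricks_pipeline": "pipelines",
}

// rewrite turns "databricks_job.my_job.id" into "resources.jobs.my_job.id";
// paths that do not start with a known Terraform name pass through unchanged.
func rewrite(ref string) string {
	parts := strings.Split(ref, ".")
	if section, ok := tfToConfig[parts[0]]; ok {
		return strings.Join(append([]string{"resources", section}, parts[1:]...), ".")
	}
	return ref
}

func main() {
	fmt.Println(rewrite("databricks_job.my_job.id")) // resources.jobs.my_job.id
	fmt.Println(rewrite("var.foo"))                  // var.foo (unchanged)
}
```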
@@ -0,0 +1,49 @@
package apps

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/require"
)

func TestAppInterpolateVariables(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"my_app_1": {
						App: &apps.App{
							Name: "my_app_1",
						},
						Config: map[string]any{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "JOB_ID", "value": "${databricks_job.my_job.id}"},
							},
						},
					},
					"my_app_2": {
						App: &apps.App{
							Name: "my_app_2",
						},
					},
				},
				Jobs: map[string]*resources.Job{
					"my_job": {
						ID: "123",
					},
				},
			},
		},
	}

	diags := bundle.Apply(context.Background(), b, InterpolateVariables())
	require.Empty(t, diags)
	require.Equal(t, []any{map[string]any{"name": "JOB_ID", "value": "123"}}, b.Config.Resources.Apps["my_app_1"].Config["env"])
	require.Nil(t, b.Config.Resources.Apps["my_app_2"].Config)
}
@@ -0,0 +1,29 @@
package apps

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/diag"
)

type slowDeployMessage struct{}

// TODO: needs to be removed when the no_compute option becomes available in the TF provider and is used in DABs.
// See https://github.com/databricks/cli/pull/2144
func (v *slowDeployMessage) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	if len(b.Config.Resources.Apps) > 0 {
		cmdio.LogString(ctx, "Note: Databricks apps included in this bundle may increase initial deployment time due to compute provisioning.")
	}

	return nil
}

func (v *slowDeployMessage) Name() string {
	return "apps.SlowDeployMessage"
}

func SlowDeployMessage() bundle.Mutator {
	return &slowDeployMessage{}
}
@@ -0,0 +1,97 @@
package apps

import (
	"bytes"
	"context"
	"fmt"
	"path"
	"strings"
	"sync"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/deploy"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/filer"
	"golang.org/x/sync/errgroup"

	"gopkg.in/yaml.v3"
)

type uploadConfig struct {
	filerFactory deploy.FilerFactory
}

func (u *uploadConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	errGroup, ctx := errgroup.WithContext(ctx)

	mu := sync.Mutex{}
	for key, app := range b.Config.Resources.Apps {
		// If the app has a config, we need to deploy it first.
		// It means we need to write app.yml file with the content of the config field
		// to the remote source code path of the app.
		if app.Config != nil {
			appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)

			buf, err := configToYaml(app)
			if err != nil {
				return diag.FromErr(err)
			}

			f, err := u.filerFactory(b)
			if err != nil {
				return diag.FromErr(err)
			}

			errGroup.Go(func() error {
				err := f.Write(ctx, path.Join(appPath, "app.yml"), buf, filer.OverwriteIfExists)
				if err != nil {
					mu.Lock()
					diags = append(diags, diag.Diagnostic{
						Severity:  diag.Error,
						Summary:   "Failed to save config",
						Detail:    fmt.Sprintf("Failed to write %s file: %s", path.Join(app.SourceCodePath, "app.yml"), err),
						Locations: b.Config.GetLocations("resources.apps." + key),
					})
					mu.Unlock()
				}
				return nil
			})
		}
	}

	if err := errGroup.Wait(); err != nil {
		return diags.Extend(diag.FromErr(err))
	}

	return diags
}

// Name implements bundle.Mutator.
func (u *uploadConfig) Name() string {
	return "apps:UploadConfig"
}

func UploadConfig() bundle.Mutator {
	return &uploadConfig{
		filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
			return filer.NewWorkspaceFilesClient(b.WorkspaceClient(), b.Config.Workspace.FilePath)
		},
	}
}

func configToYaml(app *resources.App) (*bytes.Buffer, error) {
	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2)

	err := enc.Encode(app.Config)
	defer enc.Close()

	if err != nil {
		return nil, fmt.Errorf("failed to encode app config to yaml: %w", err)
	}

	return buf, nil
}
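`configToYaml` leans on `yaml.v3`'s encoder with a two-space indent to produce the `app.yml` content the test below asserts on. A standalone sketch of that encoding step with a hand-written config map (the map contents are an assumption for illustration):

```go
package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	// A config shaped like the app 'config' field in databricks.yml.
	config := map[string]any{
		"command": []string{"echo", "hello"},
		"env": []map[string]string{
			{"name": "MY_APP", "value": "my value"},
		},
	}

	buf := bytes.NewBuffer(nil)
	enc := yaml.NewEncoder(buf)
	enc.SetIndent(2) // two-space indent, matching the expected app.yml
	if err := enc.Encode(config); err != nil {
		panic(err)
	}
	enc.Close()

	fmt.Print(buf.String())
	// command:
	//   - echo
	//   - hello
	// env:
	//   - name: MY_APP
	//     value: my value
}
```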
@@ -0,0 +1,75 @@
package apps

import (
	"bytes"
	"context"
	"os"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/bundletest"
	mockfiler "github.com/databricks/cli/internal/mocks/libs/filer"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/filer"
	"github.com/databricks/cli/libs/vfs"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/mock"
	"github.com/stretchr/testify/require"
)

func TestAppUploadConfig(t *testing.T) {
	root := t.TempDir()
	err := os.MkdirAll(filepath.Join(root, "my_app"), 0o700)
	require.NoError(t, err)

	b := &bundle.Bundle{
		BundleRootPath: root,
		SyncRootPath:   root,
		SyncRoot:       vfs.MustNew(root),
		Config: config.Root{
			Workspace: config.Workspace{
				RootPath: "/Workspace/Users/foo@bar.com/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"my_app": {
						App: &apps.App{
							Name: "my_app",
						},
						SourceCodePath: "./my_app",
						Config: map[string]any{
							"command": []string{"echo", "hello"},
							"env": []map[string]string{
								{"name": "MY_APP", "value": "my value"},
							},
						},
					},
				},
			},
		},
	}

	mockFiler := mockfiler.NewMockFiler(t)
	mockFiler.EXPECT().Write(mock.Anything, "my_app/app.yml", bytes.NewBufferString(`command:
  - echo
  - hello
env:
  - name: MY_APP
    value: my value
`), filer.OverwriteIfExists).Return(nil)

	u := uploadConfig{
		filerFactory: func(b *bundle.Bundle) (filer.Filer, error) {
			return mockFiler, nil
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})

	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u))
	require.NoError(t, diags.Error())
}
@@ -0,0 +1,53 @@
package apps

import (
	"context"
	"fmt"
	"path"
	"strings"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

type validate struct{}

func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	possibleConfigFiles := []string{"app.yml", "app.yaml"}
	usedSourceCodePaths := make(map[string]string)

	for key, app := range b.Config.Resources.Apps {
		if _, ok := usedSourceCodePaths[app.SourceCodePath]; ok {
			diags = append(diags, diag.Diagnostic{
				Severity:  diag.Error,
				Summary:   "Duplicate app source code path",
				Detail:    fmt.Sprintf("app resource '%s' has the same source code path as app resource '%s'; this will lead to the app configurations overriding each other", key, usedSourceCodePaths[app.SourceCodePath]),
				Locations: b.Config.GetLocations(fmt.Sprintf("resources.apps.%s.source_code_path", key)),
			})
		}
		usedSourceCodePaths[app.SourceCodePath] = key

		for _, configFile := range possibleConfigFiles {
			appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)
			cf := path.Join(appPath, configFile)
			if _, err := b.SyncRoot.Stat(cf); err == nil {
				diags = append(diags, diag.Diagnostic{
					Severity: diag.Error,
					Summary:  configFile + " detected",
					Detail:   fmt.Sprintf("remove %s and use 'config' property for app resource '%s' instead", cf, app.Name),
				})
			}
		}
	}

	return diags
}

func (v *validate) Name() string {
	return "apps.Validate"
}

func Validate() bundle.Mutator {
	return &validate{}
}
@@ -0,0 +1,97 @@
package apps

import (
	"context"
	"path/filepath"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/cli/bundle/internal/bundletest"
	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/vfs"
	"github.com/databricks/databricks-sdk-go/service/apps"
	"github.com/stretchr/testify/require"
)

func TestAppsValidate(t *testing.T) {
	tmpDir := t.TempDir()
	testutil.Touch(t, tmpDir, "app1", "app.yml")
	testutil.Touch(t, tmpDir, "app2", "app.py")

	b := &bundle.Bundle{
		BundleRootPath: tmpDir,
		SyncRootPath:   tmpDir,
		SyncRoot:       vfs.MustNew(tmpDir),
		Config: config.Root{
			Workspace: config.Workspace{
				FilePath: "/foo/bar/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app1": {
						App: &apps.App{
							Name: "app1",
						},
						SourceCodePath: "./app1",
					},
					"app2": {
						App: &apps.App{
							Name: "app2",
						},
						SourceCodePath: "./app2",
					},
				},
			},
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})

	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
	require.Len(t, diags, 1)
	require.Equal(t, "app.yml detected", diags[0].Summary)
	require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
}

func TestAppsValidateSameSourcePath(t *testing.T) {
	tmpDir := t.TempDir()
	testutil.Touch(t, tmpDir, "app1", "app.py")

	b := &bundle.Bundle{
		BundleRootPath: tmpDir,
		SyncRootPath:   tmpDir,
		SyncRoot:       vfs.MustNew(tmpDir),
		Config: config.Root{
			Workspace: config.Workspace{
				FilePath: "/foo/bar/",
			},
			Resources: config.Resources{
				Apps: map[string]*resources.App{
					"app1": {
						App: &apps.App{
							Name: "app1",
						},
						SourceCodePath: "./app1",
					},
					"app2": {
						App: &apps.App{
							Name: "app2",
						},
						SourceCodePath: "./app1",
					},
				},
			},
		},
	}

	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})

	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
	require.Len(t, diags, 1)
	require.Equal(t, "Duplicate app source code path", diags[0].Summary)
	require.Contains(t, diags[0].Detail, "has the same source code path as app resource")
}
@@ -57,6 +57,9 @@ type Bundle struct {
 	// It is loaded from the bundle configuration files and mutators may update it.
 	Config config.Root

+	// Target stores a snapshot of the Root.Bundle.Target configuration when it was selected by SelectTarget.
+	Target *config.Target `json:"target_config,omitempty" bundle:"internal"`
+
 	// Metadata about the bundle deployment. This is the interface Databricks services
 	// rely on to integrate with bundles when they need additional information about
 	// a bundle deployment.
@@ -0,0 +1,37 @@
package generate

import (
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/databricks-sdk-go/service/apps"
)

func ConvertAppToValue(app *apps.App, sourceCodePath string, appConfig map[string]any) (dyn.Value, error) {
	ac, err := convert.FromTyped(appConfig, dyn.NilValue)
	if err != nil {
		return dyn.NilValue, err
	}

	ar, err := convert.FromTyped(app.Resources, dyn.NilValue)
	if err != nil {
		return dyn.NilValue, err
	}

	// The majority of fields of the app struct are read-only.
	// We copy the relevant fields manually.
	dv := map[string]dyn.Value{
		"name":             dyn.NewValue(app.Name, []dyn.Location{{Line: 1}}),
		"description":      dyn.NewValue(app.Description, []dyn.Location{{Line: 2}}),
		"source_code_path": dyn.NewValue(sourceCodePath, []dyn.Location{{Line: 3}}),
	}

	if ac.Kind() != dyn.KindNil {
		dv["config"] = ac.WithLocations([]dyn.Location{{Line: 4}})
	}

	if ar.Kind() != dyn.KindNil {
		dv["resources"] = ar.WithLocations([]dyn.Location{{Line: 5}})
	}

	return dyn.V(dv), nil
}
@@ -221,6 +221,8 @@ func (m *applyPresets) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnos
 		dashboard.DisplayName = prefix + dashboard.DisplayName
 	}

+	// Apps: No presets
+
 	return diags
 }
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue