mirror of https://github.com/databricks/cli.git

commit ee86ff1b41: Merge remote-tracking branch 'origin/main' into remove-run-as

@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
+779817ed8d63031f5ea761fbd25ee84f38feec0d
@@ -140,9 +140,9 @@ func new{{.PascalName}}() *cobra.Command {
 {{- end}}
 {{$method := .}}
 {{ if not .IsJsonOnly }}
-{{range $request.Fields -}}
+{{range .AllFields -}}
 {{- if not .Required -}}
-{{if .Entity.IsObject }}// TODO: complex arg: {{.Name}}
+{{if .Entity.IsObject}}{{if not (eq . $method.RequestBodyField) }}// TODO: complex arg: {{.Name}}{{end}}
 {{else if .Entity.IsAny }}// TODO: any: {{.Name}}
 {{else if .Entity.ArrayValue }}// TODO: array: {{.Name}}
 {{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}}
@@ -0,0 +1 @@
* @pietern @andrewnester @shreyas-goenka @denik
@@ -4,3 +4,7 @@ updates:
     directory: "/"
     schedule:
       interval: "weekly"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
@@ -0,0 +1,38 @@
name: "Close Stale Issues"

on:
  workflow_dispatch:
  schedule:
    - cron: "0 0 * * *"  # Run at midnight every day

jobs:
  cleanup:
    name: Stale issue job
    runs-on:
      group: databricks-deco-testing-runner-group
      labels: ubuntu-latest-deco

    permissions:
      issues: write
      contents: read
      pull-requests: write

    steps:
      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
        with:
          stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
          stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.

          # These labels are required
          stale-issue-label: Stale
          stale-pr-label: Stale

          exempt-issue-labels: No Autoclose
          exempt-pr-labels: No Autoclose

          # Issue timing
          days-before-stale: 60
          days-before-close: 30

          repo-token: ${{ secrets.GITHUB_TOKEN }}
          loglevel: DEBUG
@@ -13,12 +13,19 @@ on:
 
 jobs:
   comment-on-pr:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     permissions:
       pull-requests: write
 
+    # Only run this job for PRs from forks.
+    # Integration tests are not run automatically for PRs from forks.
+    if: "${{ github.event.pull_request.head.repo.fork }}"
+
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Delete old comments
         env:
@@ -43,7 +50,7 @@ jobs:
         run: |
           gh pr comment ${{ github.event.pull_request.number }} --body \
           "<!-- INTEGRATION_TESTS_MANUAL -->
-          If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
+          An authorized user can trigger integration tests manually by following the instructions below:
 
           Trigger:
           [go/deco-tests-run/cli](https://go/deco-tests-run/cli)
@@ -17,7 +17,9 @@ jobs:
   # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
   #
   trigger:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
 
     steps:
       - name: Auto-approve squashed commit
@@ -11,13 +11,16 @@ jobs:
   # This workflow triggers the integration test workflow in a different repository.
   # It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
   trigger:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: "test-trigger-is"
 
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -5,41 +5,25 @@ on:
     types: [opened, synchronize]
 
 jobs:
-  check-token:
-    runs-on: ubuntu-latest
-    environment: "test-trigger-is"
-
-    outputs:
-      has_token: ${{ steps.set-token-status.outputs.has_token }}
-
-    steps:
-      - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
-        id: set-token-status
-        run: |
-          if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
-            echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
-            echo "::set-output name=has_token::false"
-          else
-            echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
-            echo "::set-output name=has_token::true"
-          fi
-
   # Trigger for pull requests.
   #
   # This workflow triggers the integration test workflow in a different repository.
   # It requires secrets from the "test-trigger-is" environment, which are only available to authorized users.
-  # It depends on the "check-token" workflow to confirm access to this environment to avoid failures.
   trigger:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: "test-trigger-is"
 
-    if: needs.check-token.outputs.has_token == 'true'
-    needs: check-token
+    # Only run this job for PRs from branches on the main repository and not from forks.
+    # Workflows triggered by PRs from forks don't have access to the "test-trigger-is" environment.
+    if: "${{ !github.event.pull_request.head.repo.fork }}"
 
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -2,15 +2,27 @@ name: publish-winget
 
 on:
   workflow_dispatch:
+    inputs:
+      tag:
+        description: 'Tag to publish'
+        default: ''
 
 jobs:
   publish-to-winget-pkgs:
-    runs-on: windows-latest
+    runs-on:
+      group: databricks-protected-runner-group
+      labels: windows-server-latest
+
     environment: release
 
     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # https://github.com/vedantmgoyal2009/winget-releaser/releases/tag/v2
+      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
         with:
           identifier: Databricks.DatabricksCLI
           installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
           token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
           fork-user: eng-dev-ecosystem-bot
+
+          # Use the tag from the input, or the ref name if the input is not provided.
+          # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
+          release-tag: ${{ inputs.tag || github.ref_name }}
@@ -13,9 +13,26 @@ on:
   # seed the build cache.
     branches:
       - main
+  schedule:
+    - cron: '0 0,12 * * *' # Runs at 00:00 and 12:00 UTC daily
+
+env:
+  GOTESTSUM_FORMAT: github-actions
 
 jobs:
+  cleanups:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+    steps:
+      - name: Clean up cache if running on schedule
+        if: ${{ github.event_name == 'schedule' }}
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: gh cache delete --all --repo databricks/cli || true
+
   tests:
+    needs: cleanups
     runs-on: ${{ matrix.os }}
 
     strategy:
@@ -28,20 +45,26 @@ jobs:
 
     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
           go-version: 1.23.4
 
       - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
         with:
           python-version: '3.9'
 
       - name: Install uv
-        uses: astral-sh/setup-uv@v4
+        uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0
+
+      - name: Run ruff
+        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
+        with:
+          version: "0.9.1"
+          args: "format --check"
 
       - name: Set go env
         run: |
@@ -54,17 +77,22 @@ jobs:
           make vendor
           pip3 install wheel
 
-      - name: Run tests
-        run: make testonly
+      - name: Run tests with coverage
+        run: make cover
 
   golangci:
+    needs: cleanups
     name: lint
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
          go-version: 1.23.4
+         # Use different schema from regular job, to avoid overwriting the same key
+         cache-dependency-path: |
+           go.sum
+           .golangci.yaml
      - name: Run go mod tidy
        run: |
          go mod tidy
@@ -73,22 +101,34 @@ jobs:
       # Exit with status code 1 if there are differences (i.e. unformatted files)
         git diff --exit-code
     - name: golangci-lint
-      uses: golangci/golangci-lint-action@v6
+      uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
       with:
-        version: v1.62.2
+        version: v1.63.4
        args: --timeout=15m
 
   validate-bundle-schema:
+    needs: cleanups
     runs-on: ubuntu-latest
 
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
 
       - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
           go-version: 1.23.4
+          # Use different schema from regular job, to avoid overwriting the same key
+          cache-dependency-path: |
+            go.sum
+            bundle/internal/schema/*.*
+
+      - name: Verify that the schema is up to date
+        run: |
+          if ! ( make schema && git diff --exit-code ); then
+            echo "The schema is not up to date. Please run 'make schema' and commit the changes."
+            exit 1
+          fi
 
       # Github repo: https://github.com/ajv-validator/ajv-cli
       - name: Install ajv-cli
@@ -20,16 +20,19 @@ on:
 
 jobs:
   goreleaser:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 0
           fetch-tags: true
 
       - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
           go-version: 1.23.4
 
@@ -45,27 +48,27 @@ jobs:
 
       - name: Run GoReleaser
         id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
         with:
           version: ~> v2
           args: release --snapshot --skip docker
 
       - name: Upload macOS binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
         with:
           name: cli_darwin_snapshot
           path: |
             dist/*_darwin_*/
 
       - name: Upload Linux binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
         with:
           name: cli_linux_snapshot
           path: |
             dist/*_linux_*/
 
       - name: Upload Windows binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
         with:
           name: cli_windows_snapshot
           path: |
@@ -85,7 +88,7 @@ jobs:
       # Snapshot release may only be updated for commits to the main branch.
       if: github.ref == 'refs/heads/main'
 
-      uses: softprops/action-gh-release@v1
+      uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
       with:
         name: Snapshot
         prerelease: true
@@ -9,18 +9,22 @@ on:
 
 jobs:
   goreleaser:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     outputs:
       artifacts: ${{ steps.releaser.outputs.artifacts }}
-    runs-on: ubuntu-latest
     steps:
       - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
         with:
           fetch-depth: 0
           fetch-tags: true
 
       - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
           go-version: 1.23.4
 
@@ -33,7 +37,7 @@ jobs:
 
       # Log into the GitHub Container Registry. The goreleaser action will create
       # the docker images and push them to the GitHub Container Registry.
-      - uses: "docker/login-action@v3"
+      - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
         with:
           registry: "ghcr.io"
           username: "${{ github.actor }}"
@@ -42,11 +46,11 @@ jobs:
       # QEMU is required to build cross platform docker images using buildx.
       # It allows virtualization of the CPU architecture at the application level.
       - name: Set up QEMU dependency
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
 
       - name: Run GoReleaser
         id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
         with:
           version: ~> v2
           args: release
@@ -54,8 +58,12 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
   create-setup-cli-release-pr:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     needs: goreleaser
-    runs-on: ubuntu-latest
     steps:
       - name: Set VERSION variable from tag
         run: |
@@ -63,7 +71,7 @@ jobs:
           echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
 
       - name: Update setup-cli
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         with:
           github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
           script: |
@@ -78,8 +86,12 @@ jobs:
           });
 
   create-homebrew-tap-release-pr:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     needs: goreleaser
-    runs-on: ubuntu-latest
     steps:
       - name: Set VERSION variable from tag
         run: |
@@ -87,7 +99,7 @@ jobs:
           echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
 
       - name: Update homebrew-tap
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         with:
           github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
           script: |
@@ -115,8 +127,12 @@ jobs:
           });
 
   create-vscode-extension-update-pr:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     needs: goreleaser
-    runs-on: ubuntu-latest
     steps:
       - name: Set VERSION variable from tag
         run: |
@@ -124,7 +140,7 @@ jobs:
           echo "VERSION=${VERSION:1}" >> $GITHUB_ENV
 
       - name: Update CLI version in the VSCode extension
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
         with:
           github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
           script: |
@@ -20,6 +20,7 @@ dist/
 
 *.log
 coverage.txt
+coverage-acceptance.txt
 
 __pycache__
 *.pyc
@@ -31,3 +32,4 @@ __pycache__
 .vscode/tasks.json
 
 .databricks
+.ruff_cache
@@ -11,12 +11,24 @@ linters:
     - gofmt
     - gofumpt
     - goimports
+    - testifylint
+    - intrange
+    - mirror
+    - perfsprint
+    - unconvert
 linters-settings:
   govet:
     enable-all: true
     disable:
       - fieldalignment
       - shadow
+    settings:
+      printf:
+        funcs:
+          - (github.com/databricks/cli/internal/testutil.TestingT).Infof
+          - (github.com/databricks/cli/internal/testutil.TestingT).Errorf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Fatalf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Skipf
   gofmt:
     rewrite-rules:
       - pattern: 'a[b:len(a)]'
@@ -32,7 +44,14 @@ linters-settings:
   gofumpt:
     module-path: github.com/databricks/cli
     extra-rules: true
-  #goimports:
-  #  local-prefixes: github.com/databricks/cli
+  testifylint:
+    enable-all: true
+    disable:
+      # good check, but we have too many assert.(No)?Errorf? so excluding for now
+      - require-error
+  copyloopvar:
+    check-alias: true
 issues:
   exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
+  max-issues-per-linter: 1000
+  max-same-issues: 1000
CHANGELOG.md | 73
@@ -1,5 +1,78 @@
 # Version changelog
 
+## [Release] Release v0.239.1
+
+CLI:
+ * Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
+ * Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
+
+Bundles:
+ * Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
+ * Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
+ * Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
+ * fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
+ * Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
+ * Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
+ * Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
+ * Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
+ * Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
+ * Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
+
+## [Release] Release v0.239.0
+
+### New feature announcement
+
+#### Databricks Apps support
+
+You can now manage Databricks Apps using DABs by defining an `app` resource in your bundle configuration.
+For more information see Databricks documentation https://docs.databricks.com/en/dev-tools/bundles/resources.html#app
+
+#### Referencing complex variables in complex variables
+
+You can now reference complex variables within other complex variables.
+For more details see https://github.com/databricks/cli/pull/2157
+
+CLI:
+ * Filter out system clusters in cluster picker ([#2131](https://github.com/databricks/cli/pull/2131)).
+ * Add command line flags for fields that are not in the API request body ([#2155](https://github.com/databricks/cli/pull/2155)).
+
+Bundles:
+ * Added support for Databricks Apps in DABs ([#1928](https://github.com/databricks/cli/pull/1928)).
+ * Allow artifact path to be located outside the sync root ([#2128](https://github.com/databricks/cli/pull/2128)).
+ * Retry app deployment if there is an active deployment in progress ([#2153](https://github.com/databricks/cli/pull/2153)).
+ * Resolve variables in a loop ([#2164](https://github.com/databricks/cli/pull/2164)).
+ * Improve resolution of complex variables within complex variables ([#2157](https://github.com/databricks/cli/pull/2157)).
+ * Added output message to warn about slower deployments with apps ([#2161](https://github.com/databricks/cli/pull/2161)).
+ * Patch references to UC schemas to capture dependencies automatically ([#1989](https://github.com/databricks/cli/pull/1989)).
+ * Format default-python template ([#2110](https://github.com/databricks/cli/pull/2110)).
+ * Encourage the use of root_path in production to ensure single deployment ([#1712](https://github.com/databricks/cli/pull/1712)).
+ * Log warnings to stderr for "bundle validate -o json" ([#2109](https://github.com/databricks/cli/pull/2109)).
+
+API Changes:
+ * Changed `databricks account federation-policy update` command with new required argument order.
+ * Changed `databricks account service-principal-federation-policy update` command with new required argument order.
+
+OpenAPI commit 779817ed8d63031f5ea761fbd25ee84f38feec0d (2025-01-08)
+Dependency updates:
+ * Upgrade TF provider to 1.63.0 ([#2162](https://github.com/databricks/cli/pull/2162)).
+ * Bump golangci-lint version to v1.63.4 from v1.63.1 ([#2114](https://github.com/databricks/cli/pull/2114)).
+ * Bump astral-sh/setup-uv from 4 to 5 ([#2116](https://github.com/databricks/cli/pull/2116)).
+ * Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 ([#2080](https://github.com/databricks/cli/pull/2080)).
+ * Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 ([#2079](https://github.com/databricks/cli/pull/2079)).
+ * Bump golang.org/x/term from 0.27.0 to 0.28.0 ([#2078](https://github.com/databricks/cli/pull/2078)).
+ * Bump github.com/databricks/databricks-sdk-go from 0.54.0 to 0.55.0 ([#2126](https://github.com/databricks/cli/pull/2126)).
+
+## [Release] Release v0.238.0
+
+Bundles:
+ * Fix finding Python within virtualenv on Windows ([#2034](https://github.com/databricks/cli/pull/2034)).
+ * Include missing field descriptions in JSON schema ([#2045](https://github.com/databricks/cli/pull/2045)).
+ * Add validation for volume referenced from `artifact_path` ([#2050](https://github.com/databricks/cli/pull/2050)).
+ * Handle `${workspace.file_path}` references in source-linked deployments ([#2046](https://github.com/databricks/cli/pull/2046)).
+ * Set the write bit for files written during template initialization ([#2068](https://github.com/databricks/cli/pull/2068)).
+
 ## [Release] Release v0.237.0
 
 Bundles:
Makefile | 62
@@ -1,38 +1,52 @@
-default: build
+default: vendor fmt lint
 
-lint: vendor
-	@echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..."
-	@golangci-lint run --fix ./...
+PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .
 
-lintcheck: vendor
-	@echo "✓ Linting source code with https://golangci-lint.run/ ..."
-	@golangci-lint run ./...
+GOTESTSUM_FORMAT ?= pkgname-and-test-fails
+GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped
 
-test: lint testonly
-
-testonly:
-	@echo "✓ Running tests ..."
-	@gotestsum --format pkgname-and-test-fails --no-summary=skipped --raw-command go test -v -json -short -coverprofile=coverage.txt ./...
+lint:
+	golangci-lint run --fix
 
-coverage: test
-	@echo "✓ Opening coverage for unit tests ..."
-	@go tool cover -html=coverage.txt
+lintcheck:
+	golangci-lint run ./...
 
+# Note 'make lint' will do formatting as well. However, if there are compilation errors,
+# formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
+# If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
+fmt:
+	ruff format -q
+	golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...
+
+test:
+	${GOTESTSUM_CMD} -- ${PACKAGES}
+
+cover:
+	rm -fr ./acceptance/build/cover/
+	CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
+	rm -fr ./acceptance/build/cover-merged/
+	mkdir -p acceptance/build/cover-merged/
+	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/
+	go tool covdata textfmt -i acceptance/build/cover-merged -o coverage-acceptance.txt
+
+showcover:
+	go tool cover -html=coverage.txt
+
+acc-showcover:
+	go tool cover -html=coverage-acceptance.txt
 
 build: vendor
-	@echo "✓ Building source code with go build ..."
-	@go build -mod vendor
+	go build -mod vendor
 
 snapshot:
-	@echo "✓ Building dev snapshot"
-	@go build -o .databricks/databricks
+	go build -o .databricks/databricks
 
 vendor:
-	@echo "✓ Filling vendor folder with library code ..."
-	@go mod vendor
+	go mod vendor
 
 schema:
-	@echo "✓ Generating json-schema ..."
-	@go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
+	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
 
 INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
 
@@ -42,4 +56,4 @@ integration:
 integration-short:
 	$(INTEGRATION) -short
 
-.PHONY: lint lintcheck test testonly coverage build snapshot vendor schema integration integration-short
+.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
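For orientation, a quick usage sketch of the reworked targets; the target names come from the Makefile hunk above, and it assumes golangci-lint, gotestsum and ruff are installed locally:

    make fmt lint        # format (ruff + gofmt/gofumpt/goimports), then lint with --fix
    make test            # unit tests via gotestsum over ${PACKAGES}
    make cover           # unit tests with -coverprofile, plus merged acceptance coverage
    make showcover       # open the unit-test coverage report
    make acc-showcover   # open the acceptance-test coverage report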
NOTICE | 4
@@ -105,3 +105,7 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
 https://github.com/hexops/gotextdiff
 Copyright (c) 2009 The Go Authors. All rights reserved.
 License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
+
+https://github.com/BurntSushi/toml
+Copyright (c) 2013 TOML authors
+https://github.com/BurntSushi/toml/blob/master/COPYING
@@ -0,0 +1,19 @@
Acceptance tests are blackbox tests that are run against a compiled binary.

Currently these tests are run against a "fake" HTTP server pretending to be the Databricks API. However, they will be extended to run against a real environment as regular integration tests.

To author a test:
 - Add a new directory under `acceptance`. Any level of nesting is supported.
 - Add `databricks.yml` there.
 - Add `script` with commands to run, e.g. `$CLI bundle validate`. The test case is recognized by the presence of `script`.

The test runner will run the script, capture its output, and compare it with the `output.txt` file in the same directory.

In order to write `output.txt` for the first time, or to overwrite it with the current output, pass the -update flag to go test.

The scripts are run with `bash -e`, so any errors will be propagated. They are captured in `output.txt` by appending an `Exit code: N` line at the end.

For more complex tests one can also use:
 - `errcode` helper: if the command fails with a non-zero code, it appends `Exit code: N` to the output but returns success to the caller (bash), allowing the script to continue.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to a custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
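To make the steps above concrete, here is a hypothetical test-case layout; the directory name and the exact commands are illustrative only, while `script`, `output.txt`, `trace`, `errcode`, `$CLI` and the `out*` naming rule come from the README above:

    acceptance/bundle/my_validate_test/
      databricks.yml   # bundle configuration the test runs against (illustrative)
      script           # presence of this file marks the directory as a test case
      output.txt       # expected output; written by passing -update to go test

    # Possible contents of `script`:
    trace $CLI bundle validate
    errcode $CLI bundle validate -o json > out.validate.json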
@@ -0,0 +1,481 @@
package acceptance_test

import (
    "context"
    "errors"
    "flag"
    "fmt"
    "io"
    "os"
    "os/exec"
    "path/filepath"
    "runtime"
    "slices"
    "sort"
    "strings"
    "testing"
    "time"

    "github.com/databricks/cli/internal/testutil"
    "github.com/databricks/cli/libs/env"
    "github.com/databricks/cli/libs/testdiff"
    "github.com/databricks/databricks-sdk-go"
    "github.com/stretchr/testify/require"
)

var KeepTmp bool

// In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
// Then install your breakpoints and click "debug test" near TestAccept in VSCODE.
// example: var SingleTest = "bundle/variables/empty"
var SingleTest = ""

// If enabled, instead of compiling and running CLI externally, we'll start in-process server that accepts and runs
// CLI commands. The $CLI in test scripts is a helper that just forwards command-line arguments to this server (see bin/callserver.py).
// Also disables parallelism in tests.
var InprocessMode bool

func init() {
    flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
    flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
}

const (
    EntryPointScript = "script"
    CleanupScript    = "script.cleanup"
    PrepareScript    = "script.prepare"
)

var Scripts = map[string]bool{
    EntryPointScript: true,
    CleanupScript:    true,
    PrepareScript:    true,
}

func TestAccept(t *testing.T) {
    testAccept(t, InprocessMode, SingleTest)
}

func TestInprocessMode(t *testing.T) {
    if InprocessMode {
        t.Skip("Already tested by TestAccept")
    }
    if runtime.GOOS == "windows" {
        // - catalogs A catalog is the first layer of Unity Catalog’s three-level namespace.
        // + catalogs A catalog is the first layer of Unity Catalog<6F>s three-level namespace.
        t.Skip("Fails on CI on unicode characters")
    }
    require.NotZero(t, testAccept(t, true, "help"))
}

func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
    repls := testdiff.ReplacementsContext{}
    cwd, err := os.Getwd()
    require.NoError(t, err)

    coverDir := os.Getenv("CLI_GOCOVERDIR")

    if coverDir != "" {
        require.NoError(t, os.MkdirAll(coverDir, os.ModePerm))
        coverDir, err = filepath.Abs(coverDir)
        require.NoError(t, err)
        t.Logf("Writing coverage to %s", coverDir)
    }

    execPath := ""

    if InprocessMode {
        cmdServer := StartCmdServer(t)
        t.Setenv("CMD_SERVER_URL", cmdServer.URL)
        execPath = filepath.Join(cwd, "bin", "callserver.py")
    } else {
        execPath = BuildCLI(t, cwd, coverDir)
    }

    t.Setenv("CLI", execPath)
    repls.SetPath(execPath, "$CLI")

    // Make helper scripts available
    t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))

    tempHomeDir := t.TempDir()
    repls.SetPath(tempHomeDir, "$TMPHOME")
    t.Logf("$TMPHOME=%v", tempHomeDir)

    // Prevent CLI from downloading terraform in each test:
    t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)

    ctx := context.Background()
    cloudEnv := os.Getenv("CLOUD_ENV")

    if cloudEnv == "" {
        server := StartServer(t)
        AddHandlers(server)
        // Redirect API access to local server:
        t.Setenv("DATABRICKS_HOST", server.URL)
        t.Setenv("DATABRICKS_TOKEN", "dapi1234")

        homeDir := t.TempDir()
        // Do not read user's ~/.databrickscfg
        t.Setenv(env.HomeEnvVar(), homeDir)
    }

    workspaceClient, err := databricks.NewWorkspaceClient()
    require.NoError(t, err)

    user, err := workspaceClient.CurrentUser.Me(ctx)
    require.NoError(t, err)
    require.NotNil(t, user)
    testdiff.PrepareReplacementsUser(t, &repls, *user)
    testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
    testdiff.PrepareReplacementsUUID(t, &repls)

    testDirs := getTests(t)
    require.NotEmpty(t, testDirs)

    if singleTest != "" {
        testDirs = slices.DeleteFunc(testDirs, func(n string) bool {
            return n != singleTest
        })
        require.NotEmpty(t, testDirs, "singleTest=%#v did not match any tests\n%#v", singleTest, testDirs)
    }

    for _, dir := range testDirs {
        testName := strings.ReplaceAll(dir, "\\", "/")
        t.Run(testName, func(t *testing.T) {
            if !InprocessMode {
                t.Parallel()
            }

            runTest(t, dir, coverDir, repls.Clone())
        })
    }

    return len(testDirs)
}

func getTests(t *testing.T) []string {
    testDirs := make([]string, 0, 128)

    err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        name := filepath.Base(path)
        if name == EntryPointScript {
            // Presence of 'script' marks a test case in this directory
            testDirs = append(testDirs, filepath.Dir(path))
        }
        return nil
    })
    require.NoError(t, err)

    sort.Strings(testDirs)
    return testDirs
}

func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
    config, configPath := LoadConfig(t, dir)

    isEnabled, isPresent := config.GOOS[runtime.GOOS]
    if isPresent && !isEnabled {
        t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
    }

    var tmpDir string
    var err error
    if KeepTmp {
        tempDirBase := filepath.Join(os.TempDir(), "acceptance")
        _ = os.Mkdir(tempDirBase, 0o755)
        tmpDir, err = os.MkdirTemp(tempDirBase, "")
        require.NoError(t, err)
        t.Logf("Created directory: %s", tmpDir)
    } else {
        tmpDir = t.TempDir()
    }

    repls.SetPathWithParents(tmpDir, "$TMPDIR")

    scriptContents := readMergedScriptContents(t, dir)
    testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)

    inputs := make(map[string]bool, 2)
    outputs := make(map[string]bool, 2)
    err = CopyDir(dir, tmpDir, inputs, outputs)
    require.NoError(t, err)

    args := []string{"bash", "-euo", "pipefail", EntryPointScript}
    cmd := exec.Command(args[0], args[1:]...)
    if coverDir != "" {
        // Creating individual coverage directory for each test, because writing to the same one
        // results in sporadic failures like this one (only if tests are running in parallel):
        // +error: coverage meta-data emit failed: writing ... rename .../tmp.covmeta.b3f... .../covmeta.b3f2c...: no such file or directory
        coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
        err := os.MkdirAll(coverDir, os.ModePerm)
        require.NoError(t, err)
        cmd.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
    }

    // Write combined output to a file
    out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
    require.NoError(t, err)
    cmd.Stdout = out
    cmd.Stderr = out
    cmd.Dir = tmpDir
    err = cmd.Run()

    // Include exit code in output (if non-zero)
    formatOutput(out, err)
    require.NoError(t, out.Close())

    // Compare expected outputs
    for relPath := range outputs {
        doComparison(t, repls, dir, tmpDir, relPath)
    }

    // Make sure there are not unaccounted for new files
    files := ListDir(t, tmpDir)
    for _, relPath := range files {
        if _, ok := inputs[relPath]; ok {
            continue
        }
        if _, ok := outputs[relPath]; ok {
            continue
        }
        if strings.HasPrefix(relPath, "out") {
            // We have a new file starting with "out"
            // Show the contents & support overwrite mode for it:
            doComparison(t, repls, dir, tmpDir, relPath)
        }
    }
}

func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string) {
    pathRef := filepath.Join(dirRef, relPath)
    pathNew := filepath.Join(dirNew, relPath)
    bufRef, okRef := readIfExists(t, pathRef)
    bufNew, okNew := readIfExists(t, pathNew)
    if !okRef && !okNew {
        t.Errorf("Both files are missing: %s, %s", pathRef, pathNew)
        return
    }

    valueRef := testdiff.NormalizeNewlines(string(bufRef))
    valueNew := testdiff.NormalizeNewlines(string(bufNew))

    // Apply replacements to the new value only.
    // The reference value is stored after applying replacements.
    valueNew = repls.Replace(valueNew)

    // The test did not produce an expected output file.
    if okRef && !okNew {
        t.Errorf("Missing output file: %s", relPath)
        testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
        if testdiff.OverwriteMode {
            t.Logf("Removing output file: %s", relPath)
            require.NoError(t, os.Remove(pathRef))
        }
        return
    }

    // The test produced an unexpected output file.
    if !okRef && okNew {
        t.Errorf("Unexpected output file: %s", relPath)
        testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
        if testdiff.OverwriteMode {
            t.Logf("Writing output file: %s", relPath)
            testutil.WriteFile(t, pathRef, valueNew)
        }
        return
    }

    // Compare the reference and new values.
    equal := testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
    if !equal && testdiff.OverwriteMode {
        t.Logf("Overwriting existing output file: %s", relPath)
        testutil.WriteFile(t, pathRef, valueNew)
    }
}

// Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
// Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
func readMergedScriptContents(t *testing.T, dir string) string {
    scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))

    // Wrap script contents in a subshell such that changing the working
    // directory only affects the main script and not cleanup.
    scriptContents = "(\n" + scriptContents + ")\n"

    prepares := []string{}
    cleanups := []string{}

    for {
        x, ok := readIfExists(t, filepath.Join(dir, CleanupScript))
        if ok {
            cleanups = append(cleanups, string(x))
        }

        x, ok = readIfExists(t, filepath.Join(dir, PrepareScript))
        if ok {
            prepares = append(prepares, string(x))
        }

        if dir == "" || dir == "." {
            break
        }

        dir = filepath.Dir(dir)
        require.True(t, filepath.IsLocal(dir))
    }

    slices.Reverse(prepares)
    prepares = append(prepares, scriptContents)
    prepares = append(prepares, cleanups...)
    return strings.Join(prepares, "\n")
}

func BuildCLI(t *testing.T, cwd, coverDir string) string {
    execPath := filepath.Join(cwd, "build", "databricks")
    if runtime.GOOS == "windows" {
        execPath += ".exe"
    }

    start := time.Now()
    args := []string{
        "go", "build",
        "-mod", "vendor",
        "-o", execPath,
    }

    if coverDir != "" {
        args = append(args, "-cover")
    }

    if runtime.GOOS == "windows" {
        // Get this error on my local Windows:
        // error obtaining VCS status: exit status 128
        // Use -buildvcs=false to disable VCS stamping.
        args = append(args, "-buildvcs=false")
    }

    cmd := exec.Command(args[0], args[1:]...)
    cmd.Dir = ".."
    out, err := cmd.CombinedOutput()
    elapsed := time.Since(start)
    t.Logf("%s took %s", args, elapsed)
    require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
    if len(out) > 0 {
        t.Logf("go build output: %s: %s", args, out)
    }

    // Quick check + warm up cache:
    cmd = exec.Command(execPath, "--version")
    out, err = cmd.CombinedOutput()
    require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
    return execPath
}

func copyFile(src, dst string) error {
    in, err := os.Open(src)
    if err != nil {
        return err
    }
    defer in.Close()

    out, err := os.Create(dst)
    if err != nil {
        return err
    }
    defer out.Close()

    _, err = io.Copy(out, in)
    return err
}

func formatOutput(w io.Writer, err error) {
    if err == nil {
        return
    }
    if exiterr, ok := err.(*exec.ExitError); ok {
        exitCode := exiterr.ExitCode()
        fmt.Fprintf(w, "\nExit code: %d\n", exitCode)
    } else {
        fmt.Fprintf(w, "\nError: %s\n", err)
    }
}

func readIfExists(t *testing.T, path string) ([]byte, bool) {
    data, err := os.ReadFile(path)
    if err == nil {
        return data, true
    }

    if !errors.Is(err, os.ErrNotExist) {
        t.Fatalf("%s: %s", path, err)
    }
    return []byte{}, false
}

func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
    return filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            return err
        }
        name := info.Name()

        relPath, err := filepath.Rel(src, path)
        if err != nil {
            return err
        }

        if strings.HasPrefix(relPath, "out") {
            if !info.IsDir() {
                outputs[relPath] = true
            }
            return nil
        } else {
            inputs[relPath] = true
        }

        if _, ok := Scripts[name]; ok {
            return nil
        }

        destPath := filepath.Join(dst, relPath)

        if info.IsDir() {
            return os.MkdirAll(destPath, info.Mode())
        }

        return copyFile(path, destPath)
    })
}

func ListDir(t *testing.T, src string) []string {
    var files []string
    err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
        if err != nil {
            // Do not FailNow here.
            // The output comparison is happening after this call which includes output.txt which
            // includes errors printed by commands which include explanation why a given file cannot be read.
            t.Errorf("Error when listing %s: path=%s: %s", src, path, err)
            return nil
        }

        if info.IsDir() {
            return nil
        }

        relPath, err := filepath.Rel(src, path)
        if err != nil {
            return err
        }

        files = append(files, relPath)
        return nil
    })
    if err != nil {
        t.Errorf("Failed to list %s: %s", src, err)
    }
    return files
}
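A sketch of the local invocations implied by the flags and environment variables defined in this file; the `./acceptance` package path is an assumption, while the `-inprocess` and `-keeptmp` flags and the `CLI_GOCOVERDIR` variable are taken from the code above:

    go test ./acceptance                              # build the CLI and run every directory that contains a `script`
    go test ./acceptance -keeptmp                     # keep each test's temp directory for inspection
    go test ./acceptance -inprocess                   # route $CLI through the in-process command server (bin/callserver.py)
    CLI_GOCOVERDIR=build/cover go test ./acceptance   # also collect coverage, as the Makefile `cover` target does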
@@ -0,0 +1,31 @@
#!/usr/bin/env python3
import sys
import os
import json
import urllib.request
from urllib.parse import urlencode

env = {}
for key, value in os.environ.items():
    if len(value) > 10_000:
        sys.stderr.write(f"Dropping key={key} value len={len(value)}\n")
        continue
    env[key] = value

q = {
    "args": " ".join(sys.argv[1:]),
    "cwd": os.getcwd(),
    "env": json.dumps(env),
}

url = os.environ["CMD_SERVER_URL"] + "/?" + urlencode(q)
if len(url) > 100_000:
    sys.exit("url too large")

resp = urllib.request.urlopen(url)
assert resp.status == 200, (resp.status, resp.url, resp.headers)
result = json.load(resp)
sys.stderr.write(result["stderr"])
sys.stdout.write(result["stdout"])
exitcode = int(result["exitcode"])
sys.exit(exitcode)
@@ -0,0 +1,22 @@
#!/usr/bin/env python3
"""
Helper to sort blocks in text file. A block is a set of lines separated from others by empty line.

This is to workaround non-determinism in the output.
"""

import sys

blocks = []

for line in sys.stdin:
    if not line.strip():
        if blocks and blocks[-1]:
            blocks.append("")
        continue
    if not blocks:
        blocks.append("")
    blocks[-1] += line

blocks.sort()
print("\n".join(blocks))
@ -0,0 +1 @@
databricks
@ -0,0 +1,21 @@

>>> $CLI bundle deploy --help
Deploy bundle

Usage:
databricks bundle deploy [flags]

Flags:
--auto-approve Skip interactive approvals that might be required for deployment.
-c, --cluster-id string Override cluster in the deployment with the given cluster ID.
--fail-on-active-runs Fail if there are running jobs or pipelines in the deployment.
--force Force-override Git branch validation.
--force-lock Force acquisition of deployment lock.
-h, --help help for deploy

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle deploy --help
@ -0,0 +1,22 @@

>>> $CLI bundle deployment --help
Deployment related commands

Usage:
databricks bundle deployment [command]

Available Commands:
bind Bind bundle-defined resources to existing resources
unbind Unbind bundle-defined resources from its managed remote resource

Flags:
-h, --help help for deployment

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

Use "databricks bundle deployment [command] --help" for more information about a command.
@ -0,0 +1 @@
trace $CLI bundle deployment --help
@ -0,0 +1,18 @@

>>> $CLI bundle destroy --help
Destroy deployed bundle resources

Usage:
databricks bundle destroy [flags]

Flags:
--auto-approve Skip interactive approvals for deleting resources and files
--force-lock Force acquisition of deployment lock.
-h, --help help for destroy

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle destroy --help
@ -0,0 +1,24 @@

>>> $CLI bundle generate dashboard --help
Generate configuration for a dashboard

Usage:
databricks bundle generate dashboard [flags]

Flags:
-s, --dashboard-dir string directory to write the dashboard representation to (default "src")
--existing-id string ID of the dashboard to generate configuration for
--existing-path string workspace path of the dashboard to generate configuration for
-f, --force force overwrite existing files in the output directory
-h, --help help for dashboard
--resource string resource key of dashboard to watch for changes
-d, --resource-dir string directory to write the configuration to (default "resources")
--watch watch for changes to the dashboard and update the configuration

Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle generate dashboard --help
@ -0,0 +1,21 @@

>>> $CLI bundle generate job --help
Generate bundle configuration for a job

Usage:
databricks bundle generate job [flags]

Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-job-id int Job ID of the job to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for job
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")

Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle generate job --help
@ -0,0 +1,21 @@

>>> $CLI bundle generate pipeline --help
Generate bundle configuration for a pipeline

Usage:
databricks bundle generate pipeline [flags]

Flags:
-d, --config-dir string Dir path where the output config will be stored (default "resources")
--existing-pipeline-id string ID of the pipeline to generate config for
-f, --force Force overwrite existing files in the output directory
-h, --help help for pipeline
-s, --source-dir string Dir path where the downloaded files will be stored (default "src")

Global Flags:
--debug enable debug logging
--key string resource key to use for the generated configuration
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle generate pipeline --help
@ -0,0 +1,25 @@

>>> $CLI bundle generate --help
Generate bundle configuration

Usage:
databricks bundle generate [command]

Available Commands:
app Generate bundle configuration for a Databricks app
dashboard Generate configuration for a dashboard
job Generate bundle configuration for a job
pipeline Generate bundle configuration for a pipeline

Flags:
-h, --help help for generate
--key string resource key to use for the generated configuration

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

Use "databricks bundle generate [command] --help" for more information about a command.
@ -0,0 +1 @@
trace $CLI bundle generate --help
@ -0,0 +1,31 @@

>>> $CLI bundle init --help
Initialize using a bundle template.

TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:
- default-python: The default Python template for Notebooks / Delta Live Tables / Workflows
- default-sql: The default SQL template for .sql files that run with Databricks SQL
- dbt-sql: The dbt SQL template (databricks.com/blog/delivering-cost-effective-data-real-time-dbt-and-databricks)
- mlops-stacks: The Databricks MLOps Stacks template (github.com/databricks/mlops-stacks)
- a local file system path with a template directory
- a Git repository URL, e.g. https://github.com/my/repository

See https://docs.databricks.com/en/dev-tools/bundles/templates.html for more information on templates.

Usage:
databricks bundle init [TEMPLATE_PATH] [flags]

Flags:
--branch string Git branch to use for template initialization
--config-file string JSON file containing key value pairs of input parameters required for template initialization.
-h, --help help for init
--output-dir string Directory to write the initialized template to.
--tag string Git tag to use for template initialization
--template-dir string Directory path within a Git repository containing the template.

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle init --help
@ -0,0 +1,17 @@

>>> $CLI bundle open --help
Open a resource in the browser

Usage:
databricks bundle open [flags]

Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for open

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle open --help
@ -0,0 +1,57 @@

>>> $CLI bundle run --help
Run the job or pipeline identified by KEY.

The KEY is the unique identifier of the resource to run. In addition to
customizing the run using any of the available flags, you can also specify
keyword or positional arguments as shown in these examples:

databricks bundle run my_job -- --key1 value1 --key2 value2

Or:

databricks bundle run my_job -- value1 value2 value3

If the specified job uses job parameters or the job has a notebook task with
parameters, the first example applies and flag names are mapped to the
parameter names.

If the specified job does not use job parameters and the job has a Python file
task or a Python wheel task, the second example applies.

Usage:
databricks bundle run [flags] KEY

Job Flags:
--params stringToString comma separated k=v pairs for job parameters (default [])

Job Task Flags:
Note: please prefer use of job-level parameters (--param) over task-level parameters.
For more information, see https://docs.databricks.com/en/workflows/jobs/create-run-jobs.html#pass-parameters-to-a-databricks-job-task
--dbt-commands strings A list of commands to execute for jobs with DBT tasks.
--jar-params strings A list of parameters for jobs with Spark JAR tasks.
--notebook-params stringToString A map from keys to values for jobs with notebook tasks. (default [])
--pipeline-params stringToString A map from keys to values for jobs with pipeline tasks. (default [])
--python-named-params stringToString A map from keys to values for jobs with Python wheel tasks. (default [])
--python-params strings A list of parameters for jobs with Python tasks.
--spark-submit-params strings A list of parameters for jobs with Spark submit tasks.
--sql-params stringToString A map from keys to values for jobs with SQL tasks. (default [])

Pipeline Flags:
--full-refresh strings List of tables to reset and recompute.
--full-refresh-all Perform a full graph reset and recompute.
--refresh strings List of tables to update.
--refresh-all Perform a full graph update.
--validate-only Perform an update to validate graph correctness.

Flags:
-h, --help help for run
--no-wait Don't wait for the run to complete.
--restart Restart the run if it is already running.

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle run --help
@ -0,0 +1,16 @@

>>> $CLI bundle schema --help
Generate JSON Schema for bundle configuration

Usage:
databricks bundle schema [flags]

Flags:
-h, --help help for schema

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle schema --help
@ -0,0 +1,17 @@

>>> $CLI bundle summary --help
Summarize resources deployed by this bundle

Usage:
databricks bundle summary [flags]

Flags:
--force-pull Skip local cache and load the state from the remote workspace
-h, --help help for summary

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle summary --help
@ -0,0 +1,19 @@

>>> $CLI bundle sync --help
Synchronize bundle tree to the workspace

Usage:
databricks bundle sync [flags]

Flags:
--full perform full synchronization (default is incremental)
-h, --help help for sync
--interval duration file system polling interval (for --watch) (default 1s)
--output type type of the output format
--watch watch local file system for changes

Global Flags:
--debug enable debug logging
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle sync --help
@ -0,0 +1,16 @@

>>> $CLI bundle validate --help
Validate configuration

Usage:
databricks bundle validate [flags]

Flags:
-h, --help help for validate

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
@ -0,0 +1 @@
trace $CLI bundle validate --help
@ -0,0 +1,33 @@

>>> $CLI bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.

Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html

Usage:
databricks bundle [command]

Available Commands:
deploy Deploy bundle
deployment Deployment related commands
destroy Destroy deployed bundle resources
generate Generate bundle configuration
init Initialize using a bundle template
open Open a resource in the browser
run Run a job or pipeline update
schema Generate JSON Schema for bundle configuration
summary Summarize resources deployed by this bundle
sync Synchronize bundle tree to the workspace
validate Validate configuration

Flags:
-h, --help help for bundle
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)

Use "databricks bundle [command] --help" for more information about a command.
@ -0,0 +1 @@
trace $CLI bundle --help
@ -0,0 +1,6 @@
bundle:
  name: non_yaml_in_includes

include:
  - test.py
  - resources/*.yml
@ -0,0 +1,10 @@
Error: Files in the 'include' configuration section must be YAML files.
in databricks.yml:5:4

The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.

Name: non_yaml_in_includes

Found 1 error

Exit code: 1
@ -0,0 +1 @@
$CLI bundle validate
@ -0,0 +1 @@
print("Hello world")
@ -1,9 +1,6 @@
 bundle:
   name: clusters
 
-workspace:
-  host: https://acme.cloud.databricks.com/
-
 resources:
   clusters:
     foo:
@ -0,0 +1,33 @@

>>> $CLI bundle validate -o json -t default
{
  "autoscale": {
    "max_workers": 7,
    "min_workers": 2
  },
  "cluster_name": "foo",
  "custom_tags": {},
  "node_type_id": "i3.xlarge",
  "num_workers": 2,
  "spark_conf": {
    "spark.executor.memory": "2g"
  },
  "spark_version": "13.3.x-scala2.12"
}

>>> $CLI bundle validate -o json -t development
{
  "autoscale": {
    "max_workers": 3,
    "min_workers": 1
  },
  "cluster_name": "foo-override",
  "custom_tags": {},
  "node_type_id": "m5.xlarge",
  "num_workers": 3,
  "spark_conf": {
    "spark.executor.memory": "4g",
    "spark.executor.memory2": "4g"
  },
  "spark_version": "15.2.x-scala2.12"
}
@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t default | jq .resources.clusters.foo
trace $CLI bundle validate -o json -t development | jq .resources.clusters.foo
@ -1,9 +1,6 @@
 bundle:
   name: override_job_cluster
 
-workspace:
-  host: https://acme.cloud.databricks.com/
-
 resources:
   jobs:
     foo:
@ -0,0 +1,56 @@

>>> $CLI bundle validate -o json -t development
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
    "job_clusters": [
      {
        "job_cluster_key": "key",
        "new_cluster": {
          "node_type_id": "i3.xlarge",
          "num_workers": 1,
          "spark_version": "13.3.x-scala2.12"
        }
      }
    ],
    "name": "job",
    "permissions": [],
    "queue": {
      "enabled": true
    },
    "tags": {}
  }
}

>>> $CLI bundle validate -o json -t staging
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
    "job_clusters": [
      {
        "job_cluster_key": "key",
        "new_cluster": {
          "node_type_id": "i3.2xlarge",
          "num_workers": 4,
          "spark_version": "13.3.x-scala2.12"
        }
      }
    ],
    "name": "job",
    "permissions": [],
    "queue": {
      "enabled": true
    },
    "tags": {}
  }
}
@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t development | jq '.resources.jobs'
trace $CLI bundle validate -o json -t staging | jq '.resources.jobs'
@ -0,0 +1,36 @@
bundle:
  name: override_job_cluster

variables:
  mykey:
    default: key

resources:
  jobs:
    foo:
      name: job
      job_clusters:
        - job_cluster_key: key
          new_cluster:
            spark_version: 13.3.x-scala2.12

targets:
  development:
    resources:
      jobs:
        foo:
          job_clusters:
            - job_cluster_key: "${var.mykey}"
              new_cluster:
                node_type_id: i3.xlarge
                num_workers: 1

  staging:
    resources:
      jobs:
        foo:
          job_clusters:
            - job_cluster_key: "${var.mykey}"
              new_cluster:
                node_type_id: i3.2xlarge
                num_workers: 4
@ -0,0 +1,74 @@

>>> $CLI bundle validate -o json -t development
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
    "job_clusters": [
      {
        "job_cluster_key": "key",
        "new_cluster": {
          "node_type_id": "i3.xlarge",
          "num_workers": 1,
          "spark_version": "13.3.x-scala2.12"
        }
      }
    ],
    "name": "job",
    "permissions": [],
    "queue": {
      "enabled": true
    },
    "tags": {}
  }
}

>>> $CLI bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development

Validation OK!

>>> $CLI bundle validate -o json -t staging
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
    "job_clusters": [
      {
        "job_cluster_key": "key",
        "new_cluster": {
          "node_type_id": "i3.2xlarge",
          "num_workers": 4,
          "spark_version": "13.3.x-scala2.12"
        }
      }
    ],
    "name": "job",
    "permissions": [],
    "queue": {
      "enabled": true
    },
    "tags": {}
  }
}

>>> $CLI bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging

Validation OK!
@ -0,0 +1,4 @@
trace $CLI bundle validate -o json -t development | jq '.resources.jobs'
trace $CLI bundle validate -t development
trace $CLI bundle validate -o json -t staging | jq '.resources.jobs'
trace $CLI bundle validate -t staging
@ -1,9 +1,6 @@
 bundle:
   name: override_job_tasks
 
-workspace:
-  host: https://acme.cloud.databricks.com/
-
 resources:
   jobs:
     foo:
@ -0,0 +1,6 @@

>>> errcode $CLI bundle validate -o json -t development
Error: file ./test1.py not found


Exit code: 1
@ -0,0 +1,77 @@
{
  "name": "job",
  "queue": {
    "enabled": true
  },
  "tags": {},
  "tasks": [
    {
      "new_cluster": {
        "node_type_id": "i3.xlarge",
        "num_workers": 1,
        "spark_version": "13.3.x-scala2.12"
      },
      "spark_python_task": {
        "python_file": "./test1.py"
      },
      "task_key": "key1"
    },
    {
      "new_cluster": {
        "spark_version": "13.3.x-scala2.12"
      },
      "spark_python_task": {
        "python_file": "./test2.py"
      },
      "task_key": "key2"
    }
  ]
}

>>> errcode $CLI bundle validate -o json -t staging
Error: file ./test1.py not found


Exit code: 1
{
  "name": "job",
  "queue": {
    "enabled": true
  },
  "tags": {},
  "tasks": [
    {
      "new_cluster": {
        "spark_version": "13.3.x-scala2.12"
      },
      "spark_python_task": {
        "python_file": "./test1.py"
      },
      "task_key": "key1"
    },
    {
      "new_cluster": {
        "node_type_id": "i3.2xlarge",
        "num_workers": 4,
        "spark_version": "13.3.x-scala2.12"
      },
      "spark_python_task": {
        "python_file": "./test3.py"
      },
      "task_key": "key2"
    }
  ]
}

>>> errcode $CLI bundle validate -t staging
Error: file ./test1.py not found

Name: override_job_tasks
Target: staging
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging

Found 1 error

Exit code: 1
@ -0,0 +1,3 @@
trace errcode $CLI bundle validate -o json -t development 2> out.development.stderr.txt | jq .resources.jobs.foo
trace errcode $CLI bundle validate -o json -t staging | jq .resources.jobs.foo
trace errcode $CLI bundle validate -t staging
@ -0,0 +1,13 @@
bundle:
  name: merge-string-map

resources:
  clusters:
    my_cluster: "hello"

targets:
  dev:
    resources:
      clusters:
        my_cluster:
          spark_version: "25"
@ -0,0 +1,27 @@

>>> $CLI bundle validate -o json -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17

{
  "clusters": {
    "my_cluster": {
      "custom_tags": {},
      "spark_version": "25"
    }
  }
}

>>> $CLI bundle validate -t dev
Warning: expected map, found string
at resources.clusters.my_cluster
in databricks.yml:6:17

Name: merge-string-map
Target: dev
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev

Found 1 warning
@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t dev | jq .resources
trace $CLI bundle validate -t dev
@ -1,9 +1,6 @@
 bundle:
   name: override_pipeline_cluster
 
-workspace:
-  host: https://acme.cloud.databricks.com/
-
 resources:
   pipelines:
     foo:
@ -0,0 +1,44 @@

>>> $CLI bundle validate -o json -t development
{
  "foo": {
    "clusters": [
      {
        "label": "default",
        "node_type_id": "i3.xlarge",
        "num_workers": 1,
        "spark_conf": {
          "foo": "bar"
        }
      }
    ],
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
    },
    "name": "job",
    "permissions": []
  }
}

>>> $CLI bundle validate -o json -t staging
{
  "foo": {
    "clusters": [
      {
        "label": "default",
        "node_type_id": "i3.2xlarge",
        "num_workers": 4,
        "spark_conf": {
          "foo": "bar"
        }
      }
    ],
    "deployment": {
      "kind": "BUNDLE",
      "metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
    },
    "name": "job",
    "permissions": []
  }
}
@ -0,0 +1,2 @@
trace $CLI bundle validate -o json -t development | jq .resources.pipelines
trace $CLI bundle validate -o json -t staging | jq .resources.pipelines
@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_nominal
+  name: fallback
 
 include:
   - "resources/*.yml"
@ -0,0 +1,67 @@
[
  {
    "job_cluster_key": "default",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
    },
    "task_key": "notebook_example"
  },
  {
    "job_cluster_key": "default",
    "spark_python_task": {
      "python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
    },
    "task_key": "spark_python_example"
  },
  {
    "dbt_task": {
      "commands": [
        "dbt run",
        "dbt run"
      ],
      "project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
    },
    "job_cluster_key": "default",
    "task_key": "dbt_example"
  },
  {
    "job_cluster_key": "default",
    "sql_task": {
      "file": {
        "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
      },
      "warehouse_id": "cafef00d"
    },
    "task_key": "sql_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "whl": "dist/wheel1.whl"
      },
      {
        "whl": "dist/wheel2.whl"
      }
    ],
    "python_wheel_task": {
      "package_name": "my_package"
    },
    "task_key": "python_wheel_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "jar": "target/jar1.jar"
      },
      {
        "jar": "target/jar2.jar"
      }
    ],
    "spark_jar_task": {
      "main_class_name": "com.example.Main"
    },
    "task_key": "spark_jar_example"
  }
]
@ -0,0 +1,22 @@
[
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
    }
  },
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
    }
  }
]
@ -0,0 +1,16 @@

>>> $CLI bundle validate -t development -o json

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: fallback
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/fallback/error

Found 1 error

Exit code: 1
@ -4,33 +4,45 @@ resources:
       name: "placeholder"
       tasks:
         - task_key: notebook_example
+          job_cluster_key: default
           notebook_task:
             notebook_path: "this value is overridden"
 
         - task_key: spark_python_example
+          job_cluster_key: default
           spark_python_task:
             python_file: "this value is overridden"
 
         - task_key: dbt_example
+          job_cluster_key: default
           dbt_task:
             project_directory: "this value is overridden"
             commands:
               - "dbt run"
 
         - task_key: sql_example
+          job_cluster_key: default
           sql_task:
             file:
               path: "this value is overridden"
             warehouse_id: cafef00d
 
         - task_key: python_wheel_example
+          job_cluster_key: default
           python_wheel_task:
             package_name: my_package
           libraries:
             - whl: ../dist/wheel1.whl
 
         - task_key: spark_jar_example
+          job_cluster_key: default
           spark_jar_task:
             main_class_name: com.example.Main
           libraries:
             - jar: ../target/jar1.jar
+
+      # Include a job cluster for completeness
+      job_clusters:
+        - job_cluster_key: default
+          new_cluster:
+            spark_version: 15.4.x-scala2.12
@ -0,0 +1,10 @@
errcode trace $CLI bundle validate -t development -o json > output.tmp.json

# Capture job tasks
jq '.resources.jobs.my_job.tasks' output.tmp.json > output.job.json

# Capture pipeline libraries
jq '.resources.pipelines.my_pipeline.libraries' output.tmp.json > output.pipeline.json

# Expect failure for the "error" target
errcode trace $CLI bundle validate -t error
@ -0,0 +1 @@
rm -f output.tmp.json
@ -1,5 +1,5 @@
 bundle:
-  name: path_translation_fallback
+  name: nominal
 
 include:
   - "resources/*.yml"
@ -0,0 +1,89 @@
[
  {
    "job_cluster_key": "default",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
    },
    "task_key": "notebook_example"
  },
  {
    "job_cluster_key": "default",
    "spark_python_task": {
      "python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
    },
    "task_key": "spark_python_example"
  },
  {
    "dbt_task": {
      "commands": [
        "dbt run",
        "dbt run"
      ],
      "project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
    },
    "job_cluster_key": "default",
    "task_key": "dbt_example"
  },
  {
    "job_cluster_key": "default",
    "sql_task": {
      "file": {
        "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
      },
      "warehouse_id": "cafef00d"
    },
    "task_key": "sql_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "whl": "dist/wheel1.whl"
      },
      {
        "whl": "dist/wheel2.whl"
      }
    ],
    "python_wheel_task": {
      "package_name": "my_package"
    },
    "task_key": "python_wheel_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "jar": "target/jar1.jar"
      },
      {
        "jar": "target/jar2.jar"
      }
    ],
    "spark_jar_task": {
      "main_class_name": "com.example.Main"
    },
    "task_key": "spark_jar_example"
  },
  {
    "for_each_task": {
      "task": {
        "notebook_task": {
          "notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
        }
      }
    },
    "job_cluster_key": "default",
    "task_key": "for_each_notebook_example"
  },
  {
    "for_each_task": {
      "task": {
        "job_cluster_key": "default",
        "spark_python_task": {
          "python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
        }
      }
    },
    "task_key": "for_each_spark_python_example"
  }
]
@ -0,0 +1,22 @@
[
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
    }
  },
  {
    "file": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
    }
  }
]
@ -0,0 +1,16 @@

>>> $CLI bundle validate -t development -o json

>>> $CLI bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: nominal
Target: error
Workspace:
User: $USERNAME
Path: /Workspace/Users/$USERNAME/.bundle/nominal/error

Found 1 error

Exit code: 1