mirror of https://github.com/databricks/cli.git

Compare commits: 757f8b95c0...b938d567b9 (171 commits)

b938d567b9, 878fa80322, 8d849fe868, f2096eddcc, e81ec4ee23, 6f3dbaec4c,
f6c50a6318, f7a45d0c7e, 4bc231ad4f, 6953a84db6, ddedc4272d, d282f33a22,
047691dd91, ee440e65fe, 4ebc86282f, cc07380185, 2175dd24a4, 06e342afc5,
f8aaa7fce3, 2a97dcaa45, 6d83ffd109, 989aabe5f1, 5aa89230e9, ff4a5c2269,
f71583fbc0, 6b1a778fe1, ecb816446e, ecc05689ca, 65ac9a336a, 54e16d5f62,
75127fe42e, 27eb0c4072, e0903fbd37, 27caf413f2, 5c90752797, 57b8d336e0,
1678503cb0, 2e1455841c, dcc61cd763, 84b694f2a1, d86ad91899, 07efe83023,
2eb9abb5ee, 9320bd1682, 838de2fde2, 75db82ae1f, 4f3a289333, 75932198f7,
91e04cc444, f267318bb9, fcedfe4c78, 2f798c4ded, e5730bf57e, 787dbe9099,
3c6eacb05b, f1efbd7d9f, a03ea73011, 58ef34f320, 55c03cc119, ce965b22b2,
38efedcd73, c3a6e11627, 13596eb605, ec7808da34, 59d6fbfee9, 708c4fbb7a,
30f57d3b49, 884b5f26ed, 124515e8d2, 413ca5c134, 099e9bed0f, 4ba222ab36,
0256225408, 5971bd5c1a, 65e4f79dfe, 3ffac80007, 11436faafe, 60709e3d48,
be908ee1a1, 67d1413db5, 52bf7e388a, 65fbbd9a7c, 4595c6f1b5, b7dd70b8b3,
6e8f0ea8af, 1cb32eca90, 82b0dd36d6, b3d98fe666, 468660dc45, f65508690d,
959e43e556, a47a058506, b4ed235104, d6d9b994d4, d784147e99, 0487e816cc,
8af9efaa62, 6153423c56, ddd45e25ee, 1f63aa0912, 798189eb96, f60ad32f07,
ba3a400327, 20c1902a45, 3d91691f25, 54a470837c, 667302b61b, 6c3ddbd921,
876526a19a, e9902036b8, c224be5c1f, fde30ff1ab, 3a32c63919, 34a37cf4a8,
de5155ed0a, 33613b5d2a, 41bbd89257, ee4a4b4c24, 84a73052d2, 69f3c0a869,
395a04a8d1, e6982d09ac, 41a21af556, 31c10c1b82, 7034793d1d, 64fc1c8fe7,
26f527ef64, 50f62692ce, 0d5193a62c, cff4f09cc8, 0c088d4050, 560c3d352e,
89eb556318, 9061635789, 2cd0d88bdd, 511c8887a8, 2e70558dc1, f2bba632cb,
fa87f22706, bc1610f6e6, 98244606b3, 8f34fc7961, b273dc5942, a002a24e41,
30dec59781, 39b03592d7, d53a78e926, 20179457b9, 581565a1c4, dd554412a6,
626045a17e, 40e96b5af2, 983a8a6633, b76eee0e8c, 5592fa889e, 25f8ee8d66,
55494a0bda, cc44e368b8, 6a7eefa54b, 82e35530b0, 72e677d0ac, fca6abdfac,
ccb2599b42, a5e09ab28a, 2ae2b7e8c8, fe31e4d02e, 98a1e73a0f, 2b452973f3,
5d9bc3b553, e682eeba80, e1f5f60a8d
@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
+c72c58f97b950fcb924a90ef164bcb10cfcd5ece
@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
 {{- end }}
 )

+{{- $excludeFromJson := list "http-request"}}
+
 func new{{.PascalName}}() *cobra.Command {
 	cmd := &cobra.Command{}

+	{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 	{{- if .Request}}

 	var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 	{{- if .RequestBodyField }}
 	{{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 	{{- end }}
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	var {{.CamelName}}Json flags.JsonFlag
 	{{- end}}
 	{{- end}}

@@ -135,14 +138,14 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $request = .RequestBodyField.Entity -}}
 	{{- end -}}
 	{{if $request }}// TODO: short flags
-	{{- if .CanUseJson}}
+	{{- if $canUseJson}}
 	cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 	{{- end}}
 	{{$method := .}}
 	{{ if not .IsJsonOnly }}
 	{{range $request.Fields -}}
 	{{range .AllFields -}}
 	{{- if not .Required -}}
-	{{if .Entity.IsObject }}// TODO: complex arg: {{.Name}}
+	{{if .Entity.IsObject}}{{if not (eq . $method.RequestBodyField) }}// TODO: complex arg: {{.Name}}{{end}}
 	{{else if .Entity.IsAny }}// TODO: any: {{.Name}}
 	{{else if .Entity.ArrayValue }}// TODO: array: {{.Name}}
 	{{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}}

@@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
 	{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
 	{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+	{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 	{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}

 	{{- $atleastOneArgumentWithDescription := false -}}

@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
 	ctx := cmd.Context()
 	{{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 	{{- if .Request }}
-	{{ if .CanUseJson }}
+	{{ if $canUseJson }}
 	if cmd.Flags().Changed("json") {
 		diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 		if diags.HasError() {

@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
 		return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 	}{{- end}}
 	{{- if $hasPosArgs }}
-	{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+	{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 	{{- end}}
 	{{- if $hasIdPrompt}}
 	if len(args) == 0 {

@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {

 	{{$method := .}}
 	{{- range $arg, $field := .RequiredPositionalArguments}}
-	{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
+	{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
 	{{- end -}}
-	{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+	{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 	}
 	{{- end}}

@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
 	{{- $method := .Method -}}
 	{{- $arg := .Arg -}}
 	{{- $hasIdPrompt := .HasIdPrompt -}}
-	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
+	{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
+	{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
 	{{- if $optionalIfJsonIsUsed }}
 	if !cmd.Flags().Changed("json") {
 	{{- end }}
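The effect of the `$excludeFromJson` gate above is easiest to see on the rendered side. Below is a minimal, hand-written sketch (not actual generator output; the command body and `main` wrapper are illustrative only) of a command whose kebab-case name is in the exclusion list, so no `--json` flag is registered:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// newHttpRequest approximates the generated shape when $canUseJson is
// false: the template skips both the flags.JsonFlag variable and the
// cmd.Flags().Var(..., "json", ...) registration, so the command accepts
// only positional arguments and regular typed flags.
func newHttpRequest() *cobra.Command {
	cmd := &cobra.Command{Use: "http-request"}
	cmd.RunE = func(cmd *cobra.Command, args []string) error {
		// Request fields are populated from positional args, never --json.
		fmt.Println("args:", args)
		return nil
	}
	return cmd
}

func main() {
	_ = newHttpRequest().Execute()
}
```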
@@ -1,11 +1,13 @@
 cmd/account/access-control/access-control.go linguist-generated=true
 cmd/account/billable-usage/billable-usage.go linguist-generated=true
+cmd/account/budget-policy/budget-policy.go linguist-generated=true
 cmd/account/budgets/budgets.go linguist-generated=true
 cmd/account/cmd.go linguist-generated=true
 cmd/account/credentials/credentials.go linguist-generated=true
 cmd/account/csp-enablement-account/csp-enablement-account.go linguist-generated=true
 cmd/account/custom-app-integration/custom-app-integration.go linguist-generated=true
 cmd/account/disable-legacy-features/disable-legacy-features.go linguist-generated=true
+cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=true
 cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
 cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
 cmd/account/federation-policy/federation-policy.go linguist-generated=true

@@ -31,6 +33,7 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
+cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true

@@ -74,6 +77,7 @@ cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
 cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
 cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
 cmd/workspace/jobs/jobs.go linguist-generated=true
+cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true
 cmd/workspace/lakeview/lakeview.go linguist-generated=true
 cmd/workspace/libraries/libraries.go linguist-generated=true
 cmd/workspace/metastores/metastores.go linguist-generated=true

@@ -98,11 +102,13 @@ cmd/workspace/providers/providers.go linguist-generated=true
 cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
 cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
 cmd/workspace/queries/queries.go linguist-generated=true
+cmd/workspace/query-execution/query-execution.go linguist-generated=true
 cmd/workspace/query-history/query-history.go linguist-generated=true
 cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
 cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
 cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
 cmd/workspace/recipients/recipients.go linguist-generated=true
+cmd/workspace/redash-config/redash-config.go linguist-generated=true
 cmd/workspace/registered-models/registered-models.go linguist-generated=true
 cmd/workspace/repos/repos.go linguist-generated=true
 cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true
@@ -1 +1,2 @@
 * @pietern @andrewnester @shreyas-goenka @denik
+cmd/labs @alexott @nfx
@@ -18,7 +18,7 @@ jobs:
       pull-requests: write

    steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
        with:
          stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
          stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.
@@ -25,7 +25,7 @@ jobs:
    if: "${{ github.event.pull_request.head.repo.fork }}"

    steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Delete old comments
        env:
@@ -20,7 +20,7 @@ jobs:
    steps:
      - name: Generate GitHub App Token
        id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -23,7 +23,7 @@ jobs:
    steps:
      - name: Generate GitHub App Token
        id: generate-token
-        uses: actions/create-github-app-token@v1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -10,19 +10,65 @@ on:
 jobs:
   publish-to-winget-pkgs:
     runs-on:
-      group: databricks-protected-runner-group
-      labels: windows-server-latest
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco

    environment: release

    steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # https://github.com/vedantmgoyal2009/winget-releaser/releases/tag/v2
-        with:
-          identifier: Databricks.DatabricksCLI
-          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
-          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
-          fork-user: eng-dev-ecosystem-bot
-          # Use the tag from the input, or the ref name if the input is not provided.
-          # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
-          release-tag: ${{ inputs.tag || github.ref_name }}
+      - name: Checkout repository and submodules
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      # When updating the version of komac, make sure to update the checksum in the next step.
+      # Find both at https://github.com/russellbanks/Komac/releases.
+      - name: Download komac binary
+        run: |
+          curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
+
+      - name: Verify komac binary
+        run: |
+          echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
+
+      - name: Untar komac binary to temporary path
+        run: |
+          mkdir -p $RUNNER_TEMP/komac
+          tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
+
+      - name: Add komac to PATH
+        run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
+
+      - name: Confirm komac version
+        run: komac --version
+
+      # Use the tag from the input, or the ref name if the input is not provided.
+      # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
+      - name: Strip "v" prefix from version
+        id: strip_version
+        run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
+
+      - name: Get URLs of signed Windows binaries
+        id: get_windows_urls
+        run: |
+          urls=$(
+            gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
+            jq -r .assets[].browser_download_url | \
+            grep -E '_windows_.*-signed\.zip$' | \
+            tr '\n' ' '
+          )
+          if [ -z "$urls" ]; then
+            echo "No signed Windows binaries found" >&2
+            exit 1
+          fi
+          echo "urls=$urls" >> "$GITHUB_OUTPUT"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to Winget
+        run: |
+          komac update Databricks.DatabricksCLI \
+            --version ${{ steps.strip_version.outputs.version }} \
+            --submit \
+            --urls ${{ steps.get_windows_urls.outputs.urls }} \
+        env:
+          KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
+          GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
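The komac steps above pin a downloaded release binary to a SHA-256 digest before trusting it. As an aside, the same verify-before-use pattern as a standalone Go sketch (not part of the workflow; the tarball path comes from the command line):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// Pinned digest for komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz,
// copied from the workflow step above.
const pinnedSHA256 = "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c"

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: verify <path-to-tarball>")
		os.Exit(2)
	}
	f, err := os.Open(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	// Stream the file through the hash and compare against the pin.
	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if got := hex.EncodeToString(h.Sum(nil)); got != pinnedSHA256 {
		fmt.Fprintf(os.Stderr, "checksum mismatch: got %s\n", got)
		os.Exit(1)
	}
	fmt.Println("checksum OK")
}
```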
@@ -45,20 +45,20 @@ jobs:

    steps:
      - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
-          go-version: 1.23.4
+          go-version-file: go.mod

      - name: Setup Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
        with:
          python-version: '3.9'

      - name: Install uv
-        uses: astral-sh/setup-uv@v5
+        uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0

      - name: Set go env
        run: |

@@ -71,18 +71,18 @@ jobs:
          make vendor
          pip3 install wheel

-      - name: Run tests
-        run: make test
+      - name: Run tests with coverage
+        run: make cover

-  golangci:
+  linters:
+    needs: cleanups
    name: lint
    runs-on: ubuntu-latest
    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-go@v5
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+      - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
-          go-version: 1.23.4
+          go-version-file: go.mod
          # Use different schema from regular job, to avoid overwriting the same key
          cache-dependency-path: |
            go.sum

@@ -95,10 +95,15 @@ jobs:
          # Exit with status code 1 if there are differences (i.e. unformatted files)
          git diff --exit-code
      - name: golangci-lint
-        uses: golangci/golangci-lint-action@v6
+        uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
        with:
          version: v1.63.4
          args: --timeout=15m
+      - name: Run ruff
+        uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
+        with:
+          version: "0.9.1"
+          args: "format --check"

  validate-bundle-schema:
+    needs: cleanups

@@ -106,12 +111,12 @@ jobs:

    steps:
      - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
-          go-version: 1.23.4
+          go-version-file: go.mod
          # Use different schema from regular job, to avoid overwriting the same key
          cache-dependency-path: |
            go.sum
@@ -26,15 +26,15 @@ jobs:

    steps:
      - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
-          go-version: 1.23.4
+          go-version-file: go.mod

      # The default cache key for this action considers only the `go.sum` file.
      # We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -48,27 +48,27 @@ jobs:

      - name: Run GoReleaser
        id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
        with:
          version: ~> v2
          args: release --snapshot --skip docker

      - name: Upload macOS binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_darwin_snapshot
          path: |
            dist/*_darwin_*/

      - name: Upload Linux binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_linux_snapshot
          path: |
            dist/*_linux_*/

      - name: Upload Windows binaries
-        uses: actions/upload-artifact@v4
+        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
        with:
          name: cli_windows_snapshot
          path: |

@@ -88,7 +88,7 @@ jobs:
      # Snapshot release may only be updated for commits to the main branch.
      if: github.ref == 'refs/heads/main'

-      uses: softprops/action-gh-release@v1
+      uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
      with:
        name: Snapshot
        prerelease: true
@@ -18,15 +18,15 @@ jobs:

    steps:
      - name: Checkout repository and submodules
-        uses: actions/checkout@v4
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
        with:
          fetch-depth: 0
          fetch-tags: true

      - name: Setup Go
-        uses: actions/setup-go@v5
+        uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
        with:
-          go-version: 1.23.4
+          go-version-file: go.mod

      # The default cache key for this action considers only the `go.sum` file.
      # We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -37,7 +37,7 @@ jobs:

      # Log into the GitHub Container Registry. The goreleaser action will create
      # the docker images and push them to the GitHub Container Registry.
-      - uses: "docker/login-action@v3"
+      - uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
        with:
          registry: "ghcr.io"
          username: "${{ github.actor }}"

@@ -46,11 +46,11 @@ jobs:
      # QEMU is required to build cross platform docker images using buildx.
      # It allows virtualization of the CPU architecture at the application level.
      - name: Set up QEMU dependency
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0

      - name: Run GoReleaser
        id: releaser
-        uses: goreleaser/goreleaser-action@v6
+        uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf # v6.1.0
        with:
          version: ~> v2
          args: release

@@ -71,7 +71,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update setup-cli
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |

@@ -99,7 +99,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update homebrew-tap
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |

@@ -140,7 +140,7 @@ jobs:
          echo "VERSION=${VERSION:1}" >> $GITHUB_ENV

      - name: Update CLI version in the VSCode extension
-        uses: actions/github-script@v7
+        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1
        with:
          github-token: ${{ secrets.DECO_GITHUB_TOKEN }}
          script: |
@@ -20,14 +20,12 @@ dist/

 *.log
 coverage.txt
+coverage-acceptance.txt

 __pycache__
 *.pyc

 .terraform
 .terraform.lock.hcl

 .vscode/launch.json
 .vscode/tasks.json

 .databricks
+.ruff_cache
@@ -15,12 +15,20 @@ linters:
    - intrange
    - mirror
    - perfsprint
+    - unconvert
 linters-settings:
  govet:
    enable-all: true
    disable:
      - fieldalignment
      - shadow
+    settings:
+      printf:
+        funcs:
+          - (github.com/databricks/cli/internal/testutil.TestingT).Infof
+          - (github.com/databricks/cli/internal/testutil.TestingT).Errorf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Fatalf
+          - (github.com/databricks/cli/internal/testutil.TestingT).Skipf
  gofmt:
    rewrite-rules:
      - pattern: 'a[b:len(a)]'

@@ -41,6 +49,8 @@ linters-settings:
    disable:
      # good check, but we have too many assert.(No)?Errorf? so excluding for now
      - require-error
+  copyloopvar:
+    check-alias: true
 issues:
  exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/
  max-issues-per-linter: 1000
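The new `printf.funcs` entries register the `testutil.TestingT` helpers as printf-style wrappers, so govet validates their format strings. A small sketch of what that buys (the `logVersion` helper is hypothetical; the `Infof` method name is taken from the config above):

```go
package example

import "github.com/databricks/cli/internal/testutil"

// logVersion is a hypothetical helper; with the printf funcs registered
// in .golangci.yaml, govet checks its format string/argument pairs.
func logVersion(t testutil.TestingT, version string) {
	t.Infof("building CLI version %s", version) // ok: %s matches a string
	// t.Infof("building CLI version %d", version) // would now be flagged by govet's printf check
}
```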
CHANGELOG.md

@@ -1,5 +1,89 @@
 # Version changelog

+## [Release] Release v0.240.0
+
+Bundles:
+* Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
+* Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
+* Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
+* libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
+
+API Changes:
+* Added `databricks access-control` command group.
+* Added `databricks serving-endpoints http-request` command.
+* Changed `databricks serving-endpoints create` command with new required argument order.
+* Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
+* Changed `databricks recipients update` command return type to become non-empty.
+
+OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
+Dependency updates:
+* Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
+* Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
+
+## [Release] Release v0.239.1
+
+CLI:
+* Added text output templates for apps list and list-deployments ([#2175](https://github.com/databricks/cli/pull/2175)).
+* Fix duplicate "apps" entry in help output ([#2191](https://github.com/databricks/cli/pull/2191)).
+
+Bundles:
+* Allow yaml-anchors in schema ([#2200](https://github.com/databricks/cli/pull/2200)).
+* Show an error when non-yaml files used in include section ([#2201](https://github.com/databricks/cli/pull/2201)).
+* Set WorktreeRoot to sync root outside git repo ([#2197](https://github.com/databricks/cli/pull/2197)).
+* fix: Detailed message for using source-linked deployment with file_path specified ([#2119](https://github.com/databricks/cli/pull/2119)).
+* Allow using variables in enum fields ([#2199](https://github.com/databricks/cli/pull/2199)).
+* Add experimental-jobs-as-code template ([#2177](https://github.com/databricks/cli/pull/2177)).
+* Reading variables from file ([#2171](https://github.com/databricks/cli/pull/2171)).
+* Fixed an apps message order and added output test ([#2174](https://github.com/databricks/cli/pull/2174)).
+* Default to forward slash-separated paths for path translation ([#2145](https://github.com/databricks/cli/pull/2145)).
+* Include a materialized copy of built-in templates ([#2146](https://github.com/databricks/cli/pull/2146)).
+
+
+## [Release] Release v0.239.0
+
+### New feature announcement
+
+#### Databricks Apps support
+
+You can now manage Databricks Apps using DABs by defining an `app` resource in your bundle configuration.
+For more information see Databricks documentation https://docs.databricks.com/en/dev-tools/bundles/resources.html#app
+
+#### Referencing complex variables in complex variables
+
+You can now reference complex variables within other complex variables.
+For more details see https://github.com/databricks/cli/pull/2157
+
+CLI:
+* Filter out system clusters in cluster picker ([#2131](https://github.com/databricks/cli/pull/2131)).
+* Add command line flags for fields that are not in the API request body ([#2155](https://github.com/databricks/cli/pull/2155)).
+
+Bundles:
+* Added support for Databricks Apps in DABs ([#1928](https://github.com/databricks/cli/pull/1928)).
+* Allow artifact path to be located outside the sync root ([#2128](https://github.com/databricks/cli/pull/2128)).
+* Retry app deployment if there is an active deployment in progress ([#2153](https://github.com/databricks/cli/pull/2153)).
+* Resolve variables in a loop ([#2164](https://github.com/databricks/cli/pull/2164)).
+* Improve resolution of complex variables within complex variables ([#2157](https://github.com/databricks/cli/pull/2157)).
+* Added output message to warn about slower deployments with apps ([#2161](https://github.com/databricks/cli/pull/2161)).
+* Patch references to UC schemas to capture dependencies automatically ([#1989](https://github.com/databricks/cli/pull/1989)).
+* Format default-python template ([#2110](https://github.com/databricks/cli/pull/2110)).
+* Encourage the use of root_path in production to ensure single deployment ([#1712](https://github.com/databricks/cli/pull/1712)).
+* Log warnings to stderr for "bundle validate -o json" ([#2109](https://github.com/databricks/cli/pull/2109)).
+
+API Changes:
+* Changed `databricks account federation-policy update` command with new required argument order.
+* Changed `databricks account service-principal-federation-policy update` command with new required argument order.
+
+OpenAPI commit 779817ed8d63031f5ea761fbd25ee84f38feec0d (2025-01-08)
+Dependency updates:
+* Upgrade TF provider to 1.63.0 ([#2162](https://github.com/databricks/cli/pull/2162)).
+* Bump golangci-lint version to v1.63.4 from v1.63.1 ([#2114](https://github.com/databricks/cli/pull/2114)).
+* Bump astral-sh/setup-uv from 4 to 5 ([#2116](https://github.com/databricks/cli/pull/2116)).
+* Bump golang.org/x/oauth2 from 0.24.0 to 0.25.0 ([#2080](https://github.com/databricks/cli/pull/2080)).
+* Bump github.com/hashicorp/hc-install from 0.9.0 to 0.9.1 ([#2079](https://github.com/databricks/cli/pull/2079)).
+* Bump golang.org/x/term from 0.27.0 to 0.28.0 ([#2078](https://github.com/databricks/cli/pull/2078)).
+* Bump github.com/databricks/databricks-sdk-go from 0.54.0 to 0.55.0 ([#2126](https://github.com/databricks/cli/pull/2126)).
+
 ## [Release] Release v0.238.0

 Bundles:
Makefile

@@ -1,12 +1,18 @@
-default: build
+default: vendor fmt lint tidy

 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .

 GOTESTSUM_FORMAT ?= pkgname-and-test-fails
+GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped

 lint:
 	golangci-lint run --fix

+tidy:
+	@# not part of golangci-lint, apparently
+	go mod tidy
+
 lintcheck:
 	golangci-lint run ./...

@@ -14,17 +20,26 @@ lintcheck:
 # formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
 # If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
 fmt:
+	ruff format -q
 	golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...

 test:
-	gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped -- ${PACKAGES}
+	${GOTESTSUM_CMD} -- ${PACKAGES}

 cover:
-	gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped -- -coverprofile=coverage.txt ${PACKAGES}
+	rm -fr ./acceptance/build/cover/
+	VERBOSE_TEST=1 CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
+	rm -fr ./acceptance/build/cover-merged/
+	mkdir -p acceptance/build/cover-merged/
+	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/
+	go tool covdata textfmt -i acceptance/build/cover-merged -o coverage-acceptance.txt

 showcover:
 	go tool cover -html=coverage.txt

+acc-showcover:
+	go tool cover -html=coverage-acceptance.txt
+
 build: vendor
 	go build -mod vendor

@@ -33,16 +48,19 @@ snapshot:

 vendor:
 	go mod vendor

 schema:
 	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json

-INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+docs:
+	go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
+
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./acceptance ./integration/..." -- -parallel 4 -timeout=2h

-integration:
+integration: vendor
 	$(INTEGRATION)

-integration-short:
-	$(INTEGRATION) -short
+integration-short: vendor
+	VERBOSE_TEST=1 $(INTEGRATION) -short

-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short
+.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs
NOTICE

@@ -105,3 +105,12 @@ License - https://github.com/wI2L/jsondiff/blob/master/LICENSE
 https://github.com/hexops/gotextdiff
 Copyright (c) 2009 The Go Authors. All rights reserved.
 License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
+
+https://github.com/BurntSushi/toml
+Copyright (c) 2013 TOML authors
+https://github.com/BurntSushi/toml/blob/master/COPYING
+
+dario.cat/mergo
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+https://github.com/darccio/mergo/blob/master/LICENSE
@@ -0,0 +1 @@
+build
@@ -17,3 +17,5 @@ For more complex tests one can also use:
 - `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
+
+See [selftest](./selftest) for a toy test.
@ -1,9 +1,13 @@
|
|||
package acceptance_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"flag"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
|
@ -13,19 +17,45 @@ import (
|
|||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/uuid"
|
||||
|
||||
"github.com/databricks/cli/internal/testutil"
|
||||
"github.com/databricks/cli/libs/env"
|
||||
"github.com/databricks/cli/libs/testdiff"
|
||||
"github.com/databricks/cli/libs/testserver"
|
||||
"github.com/databricks/databricks-sdk-go"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var KeepTmp = os.Getenv("KEEP_TMP") != ""
|
||||
var (
|
||||
KeepTmp bool
|
||||
NoRepl bool
|
||||
VerboseTest bool = os.Getenv("VERBOSE_TEST") != ""
|
||||
)
|
||||
|
||||
// In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
|
||||
// Then install your breakpoints and click "debug test" near TestAccept in VSCODE.
|
||||
// example: var SingleTest = "bundle/variables/empty"
|
||||
var SingleTest = ""
|
||||
|
||||
// If enabled, instead of compiling and running CLI externally, we'll start in-process server that accepts and runs
|
||||
// CLI commands. The $CLI in test scripts is a helper that just forwards command-line arguments to this server (see bin/callserver.py).
|
||||
// Also disables parallelism in tests.
|
||||
var InprocessMode bool
|
||||
|
||||
func init() {
|
||||
flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
|
||||
flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
|
||||
flag.BoolVar(&NoRepl, "norepl", false, "Do not apply any replacements (for debugging)")
|
||||
}
|
||||
|
||||
const (
|
||||
EntryPointScript = "script"
|
||||
CleanupScript = "script.cleanup"
|
||||
PrepareScript = "script.prepare"
|
||||
MaxFileSize = 100_000
|
||||
)
|
||||
|
||||
var Scripts = map[string]bool{
|
||||
|
@ -35,37 +65,132 @@ var Scripts = map[string]bool{
|
|||
}
|
||||
|
||||
func TestAccept(t *testing.T) {
|
||||
testAccept(t, InprocessMode, SingleTest)
|
||||
}
|
||||
|
||||
func TestInprocessMode(t *testing.T) {
|
||||
if InprocessMode {
|
||||
t.Skip("Already tested by TestAccept")
|
||||
}
|
||||
require.Equal(t, 1, testAccept(t, true, "selftest"))
|
||||
}
|
||||
|
||||
func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
|
||||
repls := testdiff.ReplacementsContext{}
|
||||
cwd, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
|
||||
execPath := BuildCLI(t, cwd)
|
||||
// $CLI is what test scripts are using
|
||||
buildDir := filepath.Join(cwd, "build", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
|
||||
|
||||
// Download terraform and provider and create config; this also creates build directory.
|
||||
RunCommand(t, []string{"python3", filepath.Join(cwd, "install_terraform.py"), "--targetdir", buildDir}, ".")
|
||||
|
||||
coverDir := os.Getenv("CLI_GOCOVERDIR")
|
||||
|
||||
if coverDir != "" {
|
||||
require.NoError(t, os.MkdirAll(coverDir, os.ModePerm))
|
||||
coverDir, err = filepath.Abs(coverDir)
|
||||
require.NoError(t, err)
|
||||
t.Logf("Writing coverage to %s", coverDir)
|
||||
}
|
||||
|
||||
execPath := ""
|
||||
|
||||
if InprocessMode {
|
||||
cmdServer := StartCmdServer(t)
|
||||
t.Setenv("CMD_SERVER_URL", cmdServer.URL)
|
||||
execPath = filepath.Join(cwd, "bin", "callserver.py")
|
||||
} else {
|
||||
execPath = BuildCLI(t, buildDir, coverDir)
|
||||
}
|
||||
|
||||
t.Setenv("CLI", execPath)
|
||||
repls.SetPath(execPath, "[CLI]")
|
||||
|
||||
// Make helper scripts available
|
||||
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
|
||||
|
||||
server := StartServer(t)
|
||||
AddHandlers(server)
|
||||
// Redirect API access to local server:
|
||||
t.Setenv("DATABRICKS_HOST", fmt.Sprintf("http://127.0.0.1:%d", server.Port))
|
||||
t.Setenv("DATABRICKS_TOKEN", "dapi1234")
|
||||
tempHomeDir := t.TempDir()
|
||||
repls.SetPath(tempHomeDir, "[TMPHOME]")
|
||||
t.Logf("$TMPHOME=%v", tempHomeDir)
|
||||
|
||||
homeDir := t.TempDir()
|
||||
// Do not read user's ~/.databrickscfg
|
||||
t.Setenv(env.HomeEnvVar(), homeDir)
|
||||
// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
|
||||
uvCache := getUVDefaultCacheDir(t)
|
||||
t.Setenv("UV_CACHE_DIR", uvCache)
|
||||
|
||||
repls := testdiff.ReplacementsContext{}
|
||||
repls.Set(execPath, "$CLI")
|
||||
ctx := context.Background()
|
||||
cloudEnv := os.Getenv("CLOUD_ENV")
|
||||
|
||||
if cloudEnv == "" {
|
||||
defaultServer := testserver.New(t)
|
||||
AddHandlers(defaultServer)
|
||||
// Redirect API access to local server:
|
||||
t.Setenv("DATABRICKS_HOST", defaultServer.URL)
|
||||
|
||||
homeDir := t.TempDir()
|
||||
// Do not read user's ~/.databrickscfg
|
||||
t.Setenv(env.HomeEnvVar(), homeDir)
|
||||
}
|
||||
|
||||
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
|
||||
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
|
||||
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
|
||||
repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
|
||||
|
||||
terraformExecPath := filepath.Join(buildDir, "terraform")
|
||||
if runtime.GOOS == "windows" {
|
||||
terraformExecPath += ".exe"
|
||||
}
|
||||
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
|
||||
t.Setenv("TERRAFORM", terraformExecPath)
|
||||
repls.SetPath(terraformExecPath, "[TERRAFORM]")
|
||||
|
||||
// do it last so that full paths match first:
|
||||
repls.SetPath(buildDir, "[BUILD_DIR]")
|
||||
|
||||
var config databricks.Config
|
||||
if cloudEnv == "" {
|
||||
// use fake token for local tests
|
||||
config = databricks.Config{Token: "dbapi1234"}
|
||||
} else {
|
||||
// non-local tests rely on environment variables
|
||||
config = databricks.Config{}
|
||||
}
|
||||
workspaceClient, err := databricks.NewWorkspaceClient(&config)
|
||||
require.NoError(t, err)
|
||||
|
||||
user, err := workspaceClient.CurrentUser.Me(ctx)
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, user)
|
||||
testdiff.PrepareReplacementsUser(t, &repls, *user)
|
||||
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
|
||||
testdiff.PrepareReplacementsUUID(t, &repls)
|
||||
testdiff.PrepareReplacementsDevVersion(t, &repls)
|
||||
testdiff.PrepareReplacementSdkVersion(t, &repls)
|
||||
testdiff.PrepareReplacementsGoVersion(t, &repls)
|
||||
|
||||
testDirs := getTests(t)
|
||||
require.NotEmpty(t, testDirs)
|
||||
|
||||
if singleTest != "" {
|
||||
testDirs = slices.DeleteFunc(testDirs, func(n string) bool {
|
||||
return n != singleTest
|
||||
})
|
||||
require.NotEmpty(t, testDirs, "singleTest=%#v did not match any tests\n%#v", singleTest, testDirs)
|
||||
}
|
||||
|
||||
for _, dir := range testDirs {
|
||||
t.Run(dir, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
runTest(t, dir, repls)
|
||||
testName := strings.ReplaceAll(dir, "\\", "/")
|
||||
t.Run(testName, func(t *testing.T) {
|
||||
if !InprocessMode {
|
||||
t.Parallel()
|
||||
}
|
||||
|
||||
runTest(t, dir, coverDir, repls.Clone())
|
||||
})
|
||||
}
|
||||
|
||||
return len(testDirs)
|
||||
}
|
||||
|
||||
func getTests(t *testing.T) []string {
|
||||
|
@ -88,7 +213,19 @@ func getTests(t *testing.T) []string {
|
|||
return testDirs
|
||||
}
|
||||
|
||||
func runTest(t *testing.T, dir string, repls testdiff.ReplacementsContext) {
|
||||
func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
|
||||
config, configPath := LoadConfig(t, dir)
|
||||
|
||||
isEnabled, isPresent := config.GOOS[runtime.GOOS]
|
||||
if isPresent && !isEnabled {
|
||||
t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
|
||||
}
|
||||
|
||||
cloudEnv := os.Getenv("CLOUD_ENV")
|
||||
if config.LocalOnly && cloudEnv != "" {
|
||||
t.Skipf("Disabled via LocalOnly setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
|
||||
}
|
||||
|
||||
var tmpDir string
|
||||
var err error
|
||||
if KeepTmp {
|
||||
|
@ -101,6 +238,9 @@ func runTest(t *testing.T, dir string, repls testdiff.ReplacementsContext) {
|
|||
tmpDir = t.TempDir()
|
||||
}
|
||||
|
||||
repls.SetPathWithParents(tmpDir, "[TMPDIR]")
|
||||
repls.Repls = append(repls.Repls, config.Repls...)
|
||||
|
||||
scriptContents := readMergedScriptContents(t, dir)
|
||||
testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
|
||||
|
||||
|
@ -111,70 +251,175 @@ func runTest(t *testing.T, dir string, repls testdiff.ReplacementsContext) {
|
|||
|
||||
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
|
||||
cmd := exec.Command(args[0], args[1:]...)
|
||||
cmd.Env = os.Environ()
|
||||
|
||||
// Start a new server with a custom configuration if the acceptance test
|
||||
// specifies a custom server stubs.
|
||||
var server *testserver.Server
|
||||
|
||||
// Start a new server for this test if either:
|
||||
// 1. A custom server spec is defined in the test configuration.
|
||||
// 2. The test is configured to record requests and assert on them. We need
|
||||
// a duplicate of the default server to record requests because the default
|
||||
// server otherwise is a shared resource.
|
||||
if cloudEnv == "" && (len(config.Server) > 0 || config.RecordRequests) {
|
||||
server = testserver.New(t)
|
||||
server.RecordRequests = config.RecordRequests
|
||||
server.IncludeRequestHeaders = config.IncludeRequestHeaders
|
||||
|
||||
// If no custom server stubs are defined, add the default handlers.
|
||||
if len(config.Server) == 0 {
|
||||
AddHandlers(server)
|
||||
}
|
||||
|
||||
for _, stub := range config.Server {
|
||||
require.NotEmpty(t, stub.Pattern)
|
||||
server.Handle(stub.Pattern, func(fakeWorkspace *testserver.FakeWorkspace, req *http.Request) (any, int) {
|
||||
statusCode := http.StatusOK
|
||||
if stub.Response.StatusCode != 0 {
|
||||
statusCode = stub.Response.StatusCode
|
||||
}
|
||||
return stub.Response.Body, statusCode
|
||||
})
|
||||
}
|
||||
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+server.URL)
|
||||
}
|
||||
|
||||
if coverDir != "" {
|
||||
// Creating individual coverage directory for each test, because writing to the same one
|
||||
// results in sporadic failures like this one (only if tests are running in parallel):
|
||||
// +error: coverage meta-data emit failed: writing ... rename .../tmp.covmeta.b3f... .../covmeta.b3f2c...: no such file or directory
|
||||
coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
|
||||
err := os.MkdirAll(coverDir, os.ModePerm)
|
||||
require.NoError(t, err)
|
||||
cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
|
||||
}
|
||||
|
||||
// Each local test should use a new token that will result into a new fake workspace,
|
||||
// so that test don't interfere with each other.
|
||||
if cloudEnv == "" {
|
||||
tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
|
||||
token := "dbapi" + tokenSuffix
|
||||
cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+token)
|
||||
repls.Set(token, "[DATABRICKS_TOKEN]")
|
||||
}
|
||||
|
||||
// Write combined output to a file
|
||||
out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
|
||||
require.NoError(t, err)
|
||||
cmd.Stdout = out
|
||||
cmd.Stderr = out
|
||||
cmd.Dir = tmpDir
|
||||
outB, err := cmd.CombinedOutput()
|
||||
err = cmd.Run()
|
||||
|
||||
out := formatOutput(string(outB), err)
|
||||
out = repls.Replace(out)
|
||||
doComparison(t, filepath.Join(dir, "output.txt"), "script output", out)
|
||||
// Write the requests made to the server to a output file if the test is
|
||||
// configured to record requests.
|
||||
if config.RecordRequests {
|
||||
f, err := os.OpenFile(filepath.Join(tmpDir, "out.requests.txt"), os.O_CREATE|os.O_WRONLY, 0o644)
|
||||
require.NoError(t, err)
|
||||
|
||||
for key := range outputs {
|
||||
if key == "output.txt" {
|
||||
// handled above
|
||||
continue
|
||||
for _, req := range server.Requests {
|
||||
reqJson, err := json.MarshalIndent(req, "", " ")
|
||||
require.NoError(t, err)
|
||||
|
||||
reqJsonWithRepls := repls.Replace(string(reqJson))
|
||||
_, err = f.WriteString(reqJsonWithRepls + "\n")
|
||||
require.NoError(t, err)
|
||||
}
|
||||
pathNew := filepath.Join(tmpDir, key)
|
||||
newValBytes, err := os.ReadFile(pathNew)
|
||||
if err != nil {
|
||||
if errors.Is(err, os.ErrNotExist) {
|
||||
t.Errorf("%s: expected to find this file but could not (%s)", key, tmpDir)
|
||||
} else {
|
||||
t.Errorf("%s: could not read: %s", key, err)
|
||||
}
|
||||
continue
|
||||
}
|
||||
pathExpected := filepath.Join(dir, key)
|
||||
newVal := repls.Replace(string(newValBytes))
|
||||
doComparison(t, pathExpected, pathNew, newVal)
|
||||
|
||||
err = f.Close()
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
// Include exit code in output (if non-zero)
|
||||
formatOutput(out, err)
|
||||
require.NoError(t, out.Close())
|
||||
|
||||
printedRepls := false
|
||||
|
||||
// Compare expected outputs
|
||||
for relPath := range outputs {
|
||||
doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
|
||||
}
|
||||
|
||||
// Make sure there are not unaccounted for new files
|
||||
files, err := os.ReadDir(tmpDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
for _, f := range files {
|
||||
name := f.Name()
|
||||
if _, ok := inputs[name]; ok {
|
||||
files := ListDir(t, tmpDir)
|
||||
unexpected := []string{}
|
||||
for _, relPath := range files {
|
||||
if _, ok := inputs[relPath]; ok {
|
||||
continue
|
||||
}
|
||||
if _, ok := outputs[name]; ok {
|
||||
if _, ok := outputs[relPath]; ok {
|
||||
continue
|
||||
}
|
||||
t.Errorf("Unexpected output: %s", f)
|
||||
if strings.HasPrefix(name, "out") {
|
||||
unexpected = append(unexpected, relPath)
|
||||
if strings.HasPrefix(relPath, "out") {
|
||||
// We have a new file starting with "out"
|
||||
// Show the contents & support overwrite mode for it:
|
||||
pathNew := filepath.Join(tmpDir, name)
|
||||
newVal := testutil.ReadFile(t, pathNew)
|
||||
newVal = repls.Replace(newVal)
|
||||
doComparison(t, filepath.Join(dir, name), filepath.Join(tmpDir, name), newVal)
|
||||
doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
|
||||
}
|
||||
}
|
||||
|
||||
if len(unexpected) > 0 {
|
||||
t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
|
||||
}
|
||||
}
|
||||
|
||||
func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {
|
||||
valueNew = testdiff.NormalizeNewlines(valueNew)
|
||||
valueExpected := string(readIfExists(t, pathExpected))
|
||||
valueExpected = testdiff.NormalizeNewlines(valueExpected)
|
||||
testdiff.AssertEqualTexts(t, pathExpected, pathNew, valueExpected, valueNew)
|
||||
if testdiff.OverwriteMode {
|
||||
if valueNew != "" {
|
||||
t.Logf("Overwriting: %s", pathExpected)
|
||||
testutil.WriteFile(t, pathExpected, valueNew)
|
||||
} else {
|
||||
t.Logf("Removing: %s", pathExpected)
|
||||
_ = os.Remove(pathExpected)
|
||||
func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
|
||||
pathRef := filepath.Join(dirRef, relPath)
|
||||
pathNew := filepath.Join(dirNew, relPath)
|
||||
bufRef, okRef := tryReading(t, pathRef)
|
||||
bufNew, okNew := tryReading(t, pathNew)
|
||||
if !okRef && !okNew {
|
||||
t.Errorf("Both files are missing or have errors: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
|
||||
return
|
||||
}
|
||||
|
||||
valueRef := testdiff.NormalizeNewlines(bufRef)
|
||||
valueNew := testdiff.NormalizeNewlines(bufNew)
|
||||
|
||||
// Apply replacements to the new value only.
|
||||
// The reference value is stored after applying replacements.
|
||||
if !NoRepl {
|
||||
valueNew = repls.Replace(valueNew)
|
||||
}
|
||||
|
||||
// The test did not produce an expected output file.
|
||||
if okRef && !okNew {
|
||||
t.Errorf("Missing output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
|
||||
testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
|
||||
if testdiff.OverwriteMode {
|
||||
t.Logf("Removing output file: %s", relPath)
|
||||
require.NoError(t, os.Remove(pathRef))
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// The test produced an unexpected output file.
|
||||
if !okRef && okNew {
|
||||
t.Errorf("Unexpected output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
|
||||
testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
|
||||
if testdiff.OverwriteMode {
|
||||
t.Logf("Writing output file: %s", relPath)
|
||||
testutil.WriteFile(t, pathRef, valueNew)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Compare the reference and new values.
|
||||
equal := testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
|
||||
if !equal && testdiff.OverwriteMode {
|
||||
t.Logf("Overwriting existing output file: %s", relPath)
|
||||
testutil.WriteFile(t, pathRef, valueNew)
|
||||
}
|
||||
|
||||
if VerboseTest && !equal && printedRepls != nil && !*printedRepls {
|
||||
*printedRepls = true
|
||||
var items []string
|
||||
for _, item := range repls.Repls {
|
||||
items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
|
||||
}
|
||||
t.Log("Available replacements:\n" + strings.Join(items, "\n"))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -182,18 +427,23 @@ func doComparison(t *testing.T, pathExpected, pathNew, valueNew string) {
|
|||
// Note, cleanups are not executed if main script fails; that's not a huge issue, since it runs it temp dir.
|
||||
func readMergedScriptContents(t *testing.T, dir string) string {
|
||||
scriptContents := testutil.ReadFile(t, filepath.Join(dir, EntryPointScript))
|
||||
|
||||
// Wrap script contents in a subshell such that changing the working
|
||||
// directory only affects the main script and not cleanup.
|
||||
scriptContents = "(\n" + scriptContents + ")\n"
|
||||
|
||||
prepares := []string{}
|
||||
cleanups := []string{}
|
||||
|
||||
for {
|
||||
x := readIfExists(t, filepath.Join(dir, CleanupScript))
|
||||
if len(x) > 0 {
|
||||
cleanups = append(cleanups, string(x))
|
||||
x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
|
||||
if ok {
|
||||
cleanups = append(cleanups, x)
|
||||
}
|
||||
|
||||
x = readIfExists(t, filepath.Join(dir, PrepareScript))
|
||||
if len(x) > 0 {
|
||||
prepares = append(prepares, string(x))
|
||||
x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
|
||||
if ok {
|
||||
prepares = append(prepares, x)
|
||||
}
|
||||
|
||||
if dir == "" || dir == "." {
|
||||
|
@ -210,28 +460,30 @@ func readMergedScriptContents(t *testing.T, dir string) string {
|
|||
return strings.Join(prepares, "\n")
|
||||
}
|
||||
|
||||
func BuildCLI(t *testing.T, cwd string) string {
|
||||
execPath := filepath.Join(cwd, "build", "databricks")
|
||||
func BuildCLI(t *testing.T, buildDir, coverDir string) string {
|
||||
execPath := filepath.Join(buildDir, "databricks")
|
||||
if runtime.GOOS == "windows" {
|
||||
execPath += ".exe"
|
||||
}
|
||||
|
||||
start := time.Now()
|
||||
args := []string{"go", "build", "-mod", "vendor", "-o", execPath}
|
||||
cmd := exec.Command(args[0], args[1:]...)
|
||||
cmd.Dir = ".."
|
||||
out, err := cmd.CombinedOutput()
|
||||
elapsed := time.Since(start)
|
||||
t.Logf("%s took %s", args, elapsed)
|
||||
require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
|
||||
if len(out) > 0 {
|
||||
t.Logf("go build output: %s: %s", args, out)
|
||||
args := []string{
|
||||
"go", "build",
|
||||
"-mod", "vendor",
|
||||
"-o", execPath,
|
||||
}
|
||||
|
||||
// Quick check + warm up cache:
|
||||
cmd = exec.Command(execPath, "--version")
|
||||
out, err = cmd.CombinedOutput()
|
||||
require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
|
||||
if coverDir != "" {
|
||||
args = append(args, "-cover")
|
||||
}
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
// Get this error on my local Windows:
|
||||
// error obtaining VCS status: exit status 128
|
||||
// Use -buildvcs=false to disable VCS stamping.
|
||||
args = append(args, "-buildvcs=false")
|
||||
}
|
||||
|
||||
RunCommand(t, args, "..")
|
||||
return execPath
|
||||
}
|
||||
|
||||
|
@@ -252,29 +504,45 @@ func copyFile(src, dst string) error {
	return err
}

-func formatOutput(out string, err error) string {
+func formatOutput(w io.Writer, err error) {
	if err == nil {
-		return out
+		return
	}
	if exiterr, ok := err.(*exec.ExitError); ok {
		exitCode := exiterr.ExitCode()
-		out += fmt.Sprintf("\nExit code: %d\n", exitCode)
+		fmt.Fprintf(w, "\nExit code: %d\n", exitCode)
	} else {
-		out += fmt.Sprintf("\nError: %s\n", err)
+		fmt.Fprintf(w, "\nError: %s\n", err)
	}
-	return out
}

-func readIfExists(t *testing.T, path string) []byte {
-	data, err := os.ReadFile(path)
-	if err == nil {
-		return data
+func tryReading(t *testing.T, path string) (string, bool) {
+	info, err := os.Stat(path)
+	if err != nil {
+		if !errors.Is(err, os.ErrNotExist) {
+			t.Errorf("%s: %s", path, err)
+		}
+		return "", false
	}

-	if !errors.Is(err, os.ErrNotExist) {
-		t.Fatalf("%s: %s", path, err)
+	if info.Size() > MaxFileSize {
+		t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+		return "", false
	}
-	return []byte{}
+
+	data, err := os.ReadFile(path)
+	if err != nil {
+		// already checked ErrNotExist above
+		t.Errorf("%s: %s", path, err)
+		return "", false
+	}
+
+	if !utf8.Valid(data) {
+		t.Errorf("%s: not valid utf-8", path)
+		return "", false
+	}
+
+	return string(data), true
}

func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
@@ -289,8 +557,10 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
		return err
	}

-	if strings.HasPrefix(name, "out") {
-		outputs[relPath] = true
+	if strings.HasPrefix(relPath, "out") {
+		if !info.IsDir() {
+			outputs[relPath] = true
+		}
		return nil
	} else {
		inputs[relPath] = true
@@ -309,3 +579,59 @@ func CopyDir(src, dst string, inputs, outputs map[string]bool) error {
		return copyFile(path, destPath)
	})
}
+
+func ListDir(t *testing.T, src string) []string {
+	var files []string
+	err := filepath.Walk(src, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			// Do not FailNow here.
+			// The output comparison happens after this call and includes output.txt,
+			// which contains errors printed by commands, including the explanation why a given file cannot be read.
+			t.Errorf("Error when listing %s: path=%s: %s", src, path, err)
+			return nil
+		}
+
+		if info.IsDir() {
+			return nil
+		}
+
+		relPath, err := filepath.Rel(src, path)
+		if err != nil {
+			return err
+		}
+
+		files = append(files, relPath)
+		return nil
+	})
+	if err != nil {
+		t.Errorf("Failed to list %s: %s", src, err)
+	}
+	return files
+}
+
+func getUVDefaultCacheDir(t *testing.T) string {
+	// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
+	// the default cache directory is
+	// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
+	cacheDir, err := os.UserCacheDir()
+	require.NoError(t, err)
+	if runtime.GOOS == "windows" {
+		return cacheDir + "\\uv\\cache"
+	} else {
+		return cacheDir + "/uv"
+	}
+}
+
+func RunCommand(t *testing.T, args []string, dir string) {
+	start := time.Now()
+	cmd := exec.Command(args[0], args[1:]...)
+	cmd.Dir = dir
+	out, err := cmd.CombinedOutput()
+	elapsed := time.Since(start)
+	t.Logf("%s took %s", args, elapsed)
+
+	require.NoError(t, err, "%s failed: %s\n%s", args, err, out)
+	if len(out) > 0 {
+		t.Logf("%s output: %s", args, out)
+	}
+}
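
One upside of the io.Writer-based formatOutput above is that callers can append the exit status directly to whatever buffer already collects the command output, instead of threading strings around. A minimal, self-contained usage sketch (the shell command and buffer are illustrative assumptions, not part of this diff):

package main

import (
	"bytes"
	"fmt"
	"os/exec"
)

// appendExitStatus mirrors the new formatOutput: after the command's
// combined output, write the exit code (or the raw error) to the buffer.
func appendExitStatus(buf *bytes.Buffer, err error) {
	if err == nil {
		return
	}
	if exiterr, ok := err.(*exec.ExitError); ok {
		fmt.Fprintf(buf, "\nExit code: %d\n", exiterr.ExitCode())
	} else {
		fmt.Fprintf(buf, "\nError: %s\n", err)
	}
}

func main() {
	var buf bytes.Buffer
	cmd := exec.Command("sh", "-c", "echo hello; exit 3")
	out, err := cmd.CombinedOutput()
	buf.Write(out)
	appendExitStatus(&buf, err)
	fmt.Print(buf.String()) // "hello" followed by "Exit code: 3"
}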
@@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST

[profile_name]
host = https://test@non-existing-subdomain.databricks.com
@@ -0,0 +1,14 @@
bundle:
  name: test-auth

workspace:
  host: $DATABRICKS_HOST

targets:
  dev:
    default: true
    workspace:
      host: $DATABRICKS_HOST
  prod:
    workspace:
      host: https://bar.com
@@ -0,0 +1,32 @@

=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"

=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"

=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"

=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]

Exit code: 1

=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_URL]

Exit code: 1

=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"

=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"
@@ -0,0 +1,30 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg

host=$DATABRICKS_HOST
unset DATABRICKS_HOST

title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName

title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName

title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName

title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
trace errcode $CLI current-user me -p profile_name | jq .userName

title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name

cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName

title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName
@@ -0,0 +1,8 @@
Badness = "When the -p flag is used inside the bundle folder for any CLI command, the CLI uses the bundle host anyway instead of the profile one"

# Some of the clouds have the DATABRICKS_HOST variable set up without the https:// prefix.
# As a result, the output placeholder becomes DATABRICKS_URL instead of DATABRICKS_HOST.
# This is a workaround to replace DATABRICKS_HOST with DATABRICKS_URL.
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_URL'
@@ -0,0 +1,31 @@
#!/usr/bin/env python3
import sys
import os
import json
import urllib.request
from urllib.parse import urlencode

env = {}
for key, value in os.environ.items():
    if len(value) > 10_000:
        sys.stderr.write(f"Dropping key={key} value len={len(value)}\n")
        continue
    env[key] = value

q = {
    "args": " ".join(sys.argv[1:]),
    "cwd": os.getcwd(),
    "env": json.dumps(env),
}

url = os.environ["CMD_SERVER_URL"] + "/?" + urlencode(q)
if len(url) > 100_000:
    sys.exit("url too large")

resp = urllib.request.urlopen(url)
assert resp.status == 200, (resp.status, resp.url, resp.headers)
result = json.load(resp)
sys.stderr.write(result["stderr"])
sys.stdout.write(result["stdout"])
exitcode = int(result["exitcode"])
sys.exit(exitcode)
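
The Python shim above assumes a command server that accepts args/cwd/env in the query string and answers with JSON fields stdout, stderr, and exitcode. One plausible shape for such a server, sketched in Go (an assumption for illustration, not the harness's actual implementation):

package main

import (
	"encoding/json"
	"log"
	"net/http"
	"strings"
)

// handler echoes back a canned result for whatever argv the shim forwarded.
// A real server would decide stdout/stderr/exitcode based on the args.
func handler(w http.ResponseWriter, r *http.Request) {
	args := strings.Fields(r.URL.Query().Get("args"))
	resp := map[string]any{
		"stdout":   "ran: " + strings.Join(args, " ") + "\n",
		"stderr":   "",
		"exitcode": 0,
	}
	if err := json.NewEncoder(w).Encode(resp); err != nil {
		log.Println(err)
	}
}

func main() {
	http.HandleFunc("/", handler)
	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
}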
@@ -4,6 +4,7 @@ Helper to sort blocks in text file. A block is a set of lines separated from other blocks by empty lines.

This is to work around non-determinism in the output.
"""

import sys

blocks = []
@@ -11,10 +12,10 @@ blocks = []
for line in sys.stdin:
    if not line.strip():
        if blocks and blocks[-1]:
-            blocks.append('')
+            blocks.append("")
        continue
    if not blocks:
-        blocks.append('')
+        blocks.append("")
    blocks[-1] += line

blocks.sort()
@@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in a text file. Similar to 'sort' but with no dependence on locale or presence of 'sort' in PATH.
"""

import sys

lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))
@@ -1 +0,0 @@
-databricks
@@ -0,0 +1,50 @@
bundle:
  name: same_name_libraries

variables:
  cluster:
    default:
      spark_version: 15.4.x-scala2.12
      node_type_id: i3.xlarge
      data_security_mode: SINGLE_USER
      num_workers: 0
      spark_conf:
        spark.master: "local[*, 4]"
        spark.databricks.cluster.profile: singleNode
      custom_tags:
        ResourceClass: SingleNode

artifacts:
  whl1:
    type: whl
    path: ./whl1
  whl2:
    type: whl
    path: ./whl2

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: task1
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
        - task_key: task2
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl2/dist/*.whl
        - task_key: task3
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
@@ -0,0 +1,14 @@

>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Error: Duplicate local library name my_default_python-0.0.1-py3-none-any.whl
  at resources.jobs.test.tasks[0].libraries[0].whl
     resources.jobs.test.tasks[1].libraries[0].whl
  in databricks.yml:36:15
     databricks.yml:43:15

Local library names must be unique

Exit code: 1
@@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2
@@ -0,0 +1,36 @@
"""
setup.py configuration script describing how to build and package this project.

This file is primarily used by the setuptools library and typically should not
be executed directly. See README.md for how to deploy, test, and run
the my_default_python project.
"""

from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@@ -0,0 +1 @@
__version__ = "0.0.1"
@@ -0,0 +1 @@
print("hello")
@@ -0,0 +1,36 @@
"""
setup.py configuration script describing how to build and package this project.

This file is primarily used by the setuptools library and typically should not
be executed directly. See README.md for how to deploy, test, and run
the my_default_python project.
"""

from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@@ -0,0 +1 @@
__version__ = "0.0.1"
@@ -0,0 +1 @@
print("hello")
@@ -0,0 +1,2 @@
bundle:
  name: debug
@@ -0,0 +1,15 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< {} pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< {} pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
@@ -0,0 +1,92 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Debug: Apply pid=12345 mutator=load
10:07:59 Info: Phase: load pid=12345 mutator=load
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Debug: Apply pid=12345 mutator=initialize
10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
<   "id": "[USERID]",
<   "userName": "[USERNAME]"
< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
>   "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
< HTTP/1.1 200 OK
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
<   "object_type": "DIRECTORY",
<   "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
10:07:59 Info: completed execution pid=12345 exit_code=0
@@ -0,0 +1,7 @@
Name: debug
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/debug/default

Validation OK!
@@ -0,0 +1,4 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt
@@ -0,0 +1,18 @@
LocalOnly = true

[[Repls]]
# The keys are unsorted and also vary per OS
Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '
New = 'Environment variables for Terraform: ...redacted... '

[[Repls]]
Old = 'pid=[0-9]+'
New = 'pid=12345'

[[Repls]]
Old = '\d\d:\d\d:\d\d'
New = '10:07:59'

[[Repls]]
Old = '\\'
New = '/'
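
For intuition, each [[Repls]] entry above acts like a regex substitution applied to the captured output before comparison, turning volatile details (pids, timestamps, path separators) into stable placeholders. A minimal sketch of that mechanic (type and function names are illustrative, not the harness's actual code):

package main

import (
	"fmt"
	"regexp"
)

// repl mirrors one [[Repls]] entry: Old is a regular expression,
// New is its replacement.
type repl struct {
	Old, New string
}

// normalize applies every replacement in order.
func normalize(out string, repls []repl) string {
	for _, r := range repls {
		out = regexp.MustCompile(r.Old).ReplaceAllString(out, r.New)
	}
	return out
}

func main() {
	repls := []repl{
		{Old: `pid=[0-9]+`, New: `pid=12345`},
		{Old: `\d\d:\d\d:\d\d`, New: `10:07:59`},
		{Old: `\\`, New: `/`},
	}
	fmt.Println(normalize(`13:45:01 Debug: start pid=9876 path=a\b`, repls))
	// Output: 10:07:59 Debug: start pid=12345 path=a/b
}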
@@ -0,0 +1,2 @@
bundle:
  name: git_job
@@ -0,0 +1,17 @@
resources:
  jobs:
    out:
      name: gitjob
      tasks:
        - task_key: test_task
          notebook_task:
            notebook_path: some/test/notebook.py
        - task_key: test_task_2
          notebook_task:
            notebook_path: /Workspace/Users/foo@bar.com/some/test/notebook.py
            source: WORKSPACE
      git_source:
        git_branch: main
        git_commit: abcdef
        git_provider: github
        git_url: https://git.databricks.com
@@ -0,0 +1,2 @@
Job is using Git source, skipping downloading files
Job configuration successfully saved to out.job.yml
@@ -0,0 +1 @@
$CLI bundle generate job --existing-job-id 1234 --config-dir . --key out
@@ -0,0 +1,33 @@
LocalOnly = true # This test needs to run against stubbed Databricks API

[[Server]]
Pattern = "GET /api/2.1/jobs/get"
Response.Body = '''
{
    "job_id": 11223344,
    "settings": {
        "name": "gitjob",
        "git_source": {
            "git_url": "https://git.databricks.com",
            "git_provider": "github",
            "git_branch": "main",
            "git_commit": "abcdef"
        },
        "tasks": [
            {
                "task_key": "test_task",
                "notebook_task": {
                    "notebook_path": "some/test/notebook.py"
                }
            },
            {
                "task_key": "test_task_2",
                "notebook_task": {
                    "source": "WORKSPACE",
                    "notebook_path": "/Workspace/Users/foo@bar.com/some/test/notebook.py"
                }
            }
        ]
    }
}
'''
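
The [[Server]] stanza above stubs a single API route with a canned JSON body, so the test runs without a real workspace. The equivalent wiring with net/http/httptest looks roughly like this (a sketch assuming Go 1.22+ method patterns; the harness builds this from the TOML itself):

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Register the one route the test exercises and serve a canned body,
	// mirroring Pattern and Response.Body from the stanza above.
	mux := http.NewServeMux()
	mux.HandleFunc("GET /api/2.1/jobs/get", func(w http.ResponseWriter, r *http.Request) {
		io.WriteString(w, `{"job_id": 11223344, "settings": {"name": "gitjob"}}`)
	})
	srv := httptest.NewServer(mux)
	defer srv.Close()

	resp, err := http.Get(srv.URL + "/api/2.1/jobs/get")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body))
}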
@@ -0,0 +1,2 @@
bundle:
  name: git-permerror
@@ -0,0 +1,81 @@
=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git

>>> [CLI] bundle validate
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied

Name: git-permerror
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default

Found 1 error

Exit code: 1

>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}


=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git/HEAD

>>> [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
  "bundle_root_path": "."
}


=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git/config

>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}
@ -0,0 +1,26 @@
|
|||
mkdir myrepo
|
||||
cd myrepo
|
||||
cp ../databricks.yml .
|
||||
git-repo-init
|
||||
mkdir -p subdir/a/b
|
||||
|
||||
printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
|
||||
trace chmod 000 .git
|
||||
errcode trace $CLI bundle validate
|
||||
errcode trace $CLI bundle validate -o json | jq .bundle.git
|
||||
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
|
||||
|
||||
printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
|
||||
chmod 700 .git
|
||||
trace chmod 000 .git/HEAD
|
||||
errcode trace $CLI bundle validate -o json | jq .bundle.git
|
||||
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
|
||||
|
||||
printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorretly set to true. bundle_root_path is not correct is subdir case.\n"
|
||||
chmod 666 .git/HEAD
|
||||
trace chmod 000 .git/config
|
||||
errcode trace $CLI bundle validate -o json | jq .bundle.git
|
||||
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git
|
||||
|
||||
cd ..
|
||||
rm -fr myrepo
|
|
@@ -0,0 +1,5 @@
Badness = "inferred flag is incorrectly set to true; bundle_root_path is not correct; Warn and Error talk about the same thing; Warn goes to stderr, Error goes to stdout (for backward compat); warning about permissions repeated twice"

[GOOS]
# This test relies on chmod which does not work on Windows
windows = false
@@ -0,0 +1,21 @@

>>> [CLI] bundle deploy --help
Deploy bundle

Usage:
  databricks bundle deploy [flags]

Flags:
      --auto-approve          Skip interactive approvals that might be required for deployment.
  -c, --cluster-id string     Override cluster in the deployment with the given cluster ID.
      --fail-on-active-runs   Fail if there are running jobs or pipelines in the deployment.
      --force                 Force-override Git branch validation.
      --force-lock            Force acquisition of deployment lock.
  -h, --help                  help for deploy

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle deploy --help
@@ -0,0 +1,22 @@

>>> [CLI] bundle deployment --help
Deployment related commands

Usage:
  databricks bundle deployment [command]

Available Commands:
  bind        Bind bundle-defined resources to existing resources
  unbind      Unbind bundle-defined resources from its managed remote resource

Flags:
  -h, --help   help for deployment

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"

Use "databricks bundle deployment [command] --help" for more information about a command.
@@ -0,0 +1 @@
trace $CLI bundle deployment --help
@@ -0,0 +1,18 @@

>>> [CLI] bundle destroy --help
Destroy deployed bundle resources

Usage:
  databricks bundle destroy [flags]

Flags:
      --auto-approve   Skip interactive approvals for deleting resources and files
      --force-lock     Force acquisition of deployment lock.
  -h, --help           help for destroy

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle destroy --help
@@ -0,0 +1,24 @@

>>> [CLI] bundle generate dashboard --help
Generate configuration for a dashboard

Usage:
  databricks bundle generate dashboard [flags]

Flags:
  -s, --dashboard-dir string   directory to write the dashboard representation to (default "src")
      --existing-id string     ID of the dashboard to generate configuration for
      --existing-path string   workspace path of the dashboard to generate configuration for
  -f, --force                  force overwrite existing files in the output directory
  -h, --help                   help for dashboard
      --resource string        resource key of dashboard to watch for changes
  -d, --resource-dir string    directory to write the configuration to (default "resources")
      --watch                  watch for changes to the dashboard and update the configuration

Global Flags:
      --debug            enable debug logging
      --key string       resource key to use for the generated configuration
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle generate dashboard --help
@@ -0,0 +1,21 @@

>>> [CLI] bundle generate job --help
Generate bundle configuration for a job

Usage:
  databricks bundle generate job [flags]

Flags:
  -d, --config-dir string     Dir path where the output config will be stored (default "resources")
      --existing-job-id int   Job ID of the job to generate config for
  -f, --force                 Force overwrite existing files in the output directory
  -h, --help                  help for job
  -s, --source-dir string     Dir path where the downloaded files will be stored (default "src")

Global Flags:
      --debug            enable debug logging
      --key string       resource key to use for the generated configuration
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle generate job --help
@@ -0,0 +1,21 @@

>>> [CLI] bundle generate pipeline --help
Generate bundle configuration for a pipeline

Usage:
  databricks bundle generate pipeline [flags]

Flags:
  -d, --config-dir string            Dir path where the output config will be stored (default "resources")
      --existing-pipeline-id string   ID of the pipeline to generate config for
  -f, --force                        Force overwrite existing files in the output directory
  -h, --help                         help for pipeline
  -s, --source-dir string            Dir path where the downloaded files will be stored (default "src")

Global Flags:
      --debug            enable debug logging
      --key string       resource key to use for the generated configuration
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle generate pipeline --help
@@ -0,0 +1,25 @@

>>> [CLI] bundle generate --help
Generate bundle configuration

Usage:
  databricks bundle generate [command]

Available Commands:
  app         Generate bundle configuration for a Databricks app
  dashboard   Generate configuration for a dashboard
  job         Generate bundle configuration for a job
  pipeline    Generate bundle configuration for a pipeline

Flags:
  -h, --help         help for generate
      --key string   resource key to use for the generated configuration

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"

Use "databricks bundle generate [command] --help" for more information about a command.
@@ -0,0 +1 @@
trace $CLI bundle generate --help
@@ -0,0 +1,31 @@

>>> [CLI] bundle init --help
Initialize using a bundle template.

TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:
- default-python: The default Python template for Notebooks / Delta Live Tables / Workflows
- default-sql: The default SQL template for .sql files that run with Databricks SQL
- dbt-sql: The dbt SQL template (databricks.com/blog/delivering-cost-effective-data-real-time-dbt-and-databricks)
- mlops-stacks: The Databricks MLOps Stacks template (github.com/databricks/mlops-stacks)
- a local file system path with a template directory
- a Git repository URL, e.g. https://github.com/my/repository

See https://docs.databricks.com/en/dev-tools/bundles/templates.html for more information on templates.

Usage:
  databricks bundle init [TEMPLATE_PATH] [flags]

Flags:
      --branch string         Git branch to use for template initialization
      --config-file string    JSON file containing key value pairs of input parameters required for template initialization.
  -h, --help                  help for init
      --output-dir string     Directory to write the initialized template to.
      --tag string            Git tag to use for template initialization
      --template-dir string   Directory path within a Git repository containing the template.

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle init --help
@@ -0,0 +1,17 @@

>>> [CLI] bundle open --help
Open a resource in the browser

Usage:
  databricks bundle open [flags]

Flags:
      --force-pull   Skip local cache and load the state from the remote workspace
  -h, --help         help for open

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle open --help
@@ -0,0 +1,57 @@

>>> [CLI] bundle run --help
Run the job or pipeline identified by KEY.

The KEY is the unique identifier of the resource to run. In addition to
customizing the run using any of the available flags, you can also specify
keyword or positional arguments as shown in these examples:

  databricks bundle run my_job -- --key1 value1 --key2 value2

Or:

  databricks bundle run my_job -- value1 value2 value3

If the specified job uses job parameters or the job has a notebook task with
parameters, the first example applies and flag names are mapped to the
parameter names.

If the specified job does not use job parameters and the job has a Python file
task or a Python wheel task, the second example applies.

Usage:
  databricks bundle run [flags] KEY

Job Flags:
      --params stringToString   comma separated k=v pairs for job parameters (default [])

Job Task Flags:
  Note: please prefer use of job-level parameters (--param) over task-level parameters.
  For more information, see https://docs.databricks.com/en/workflows/jobs/create-run-jobs.html#pass-parameters-to-a-databricks-job-task
      --dbt-commands strings                 A list of commands to execute for jobs with DBT tasks.
      --jar-params strings                   A list of parameters for jobs with Spark JAR tasks.
      --notebook-params stringToString       A map from keys to values for jobs with notebook tasks. (default [])
      --pipeline-params stringToString       A map from keys to values for jobs with pipeline tasks. (default [])
      --python-named-params stringToString   A map from keys to values for jobs with Python wheel tasks. (default [])
      --python-params strings                A list of parameters for jobs with Python tasks.
      --spark-submit-params strings          A list of parameters for jobs with Spark submit tasks.
      --sql-params stringToString            A map from keys to values for jobs with SQL tasks. (default [])

Pipeline Flags:
      --full-refresh strings   List of tables to reset and recompute.
      --full-refresh-all       Perform a full graph reset and recompute.
      --refresh strings        List of tables to update.
      --refresh-all            Perform a full graph update.
      --validate-only          Perform an update to validate graph correctness.

Flags:
  -h, --help      help for run
      --no-wait   Don't wait for the run to complete.
      --restart   Restart the run if it is already running.

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle run --help
@@ -0,0 +1,16 @@

>>> [CLI] bundle schema --help
Generate JSON Schema for bundle configuration

Usage:
  databricks bundle schema [flags]

Flags:
  -h, --help   help for schema

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle schema --help
@@ -0,0 +1,17 @@

>>> [CLI] bundle summary --help
Summarize resources deployed by this bundle

Usage:
  databricks bundle summary [flags]

Flags:
      --force-pull   Skip local cache and load the state from the remote workspace
  -h, --help         help for summary

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle summary --help
@@ -0,0 +1,19 @@

>>> [CLI] bundle sync --help
Synchronize bundle tree to the workspace

Usage:
  databricks bundle sync [flags]

Flags:
      --full                perform full synchronization (default is incremental)
  -h, --help                help for sync
      --interval duration   file system polling interval (for --watch) (default 1s)
      --output type         type of the output format
      --watch               watch local file system for changes

Global Flags:
      --debug            enable debug logging
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle sync --help
@@ -0,0 +1,16 @@

>>> [CLI] bundle validate --help
Validate configuration

Usage:
  databricks bundle validate [flags]

Flags:
  -h, --help   help for validate

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)
      --var strings      set values for variables defined in bundle config. Example: --var="foo=bar"
@@ -0,0 +1 @@
trace $CLI bundle validate --help
@@ -0,0 +1,33 @@

>>> [CLI] bundle --help
Databricks Asset Bundles let you express data/AI/analytics projects as code.

Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html

Usage:
  databricks bundle [command]

Available Commands:
  deploy      Deploy bundle
  deployment  Deployment related commands
  destroy     Destroy deployed bundle resources
  generate    Generate bundle configuration
  init        Initialize using a bundle template
  open        Open a resource in the browser
  run         Run a job or pipeline update
  schema      Generate JSON Schema for bundle configuration
  summary     Summarize resources deployed by this bundle
  sync        Synchronize bundle tree to the workspace
  validate    Validate configuration

Flags:
  -h, --help          help for bundle
      --var strings   set values for variables defined in bundle config. Example: --var="foo=bar"

Global Flags:
      --debug            enable debug logging
  -o, --output type      output type: text or json (default text)
  -p, --profile string   ~/.databrickscfg profile
  -t, --target string    bundle target to use (if applicable)

Use "databricks bundle [command] --help" for more information about a command.
@@ -0,0 +1 @@
trace $CLI bundle --help
@@ -0,0 +1,6 @@
bundle:
  name: non_yaml_in_includes

include:
  - test.py
  - resources/*.yml
@@ -0,0 +1,10 @@
Error: Files in the 'include' configuration section must be YAML or JSON files.
  in databricks.yml:5:4

The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.

Name: non_yaml_in_includes

Found 1 error

Exit code: 1
@@ -0,0 +1 @@
$CLI bundle validate
@@ -0,0 +1 @@
print("Hello world")
@@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t default
+>>> [CLI] bundle validate -o json -t default
{
  "autoscale": {
    "max_workers": 7,
@@ -15,7 +15,7 @@
  "spark_version": "13.3.x-scala2.12"
}

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
  "autoscale": {
    "max_workers": 3,
@@ -1,10 +1,10 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
@@ -27,12 +27,12 @@
  }
}

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
@@ -1,10 +1,10 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
@@ -27,21 +27,21 @@
  }
}

->>> $CLI bundle validate -t development
+>>> [CLI] bundle validate -t development
Name: override_job_cluster
Target: development
Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/development
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development

Validation OK!

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
  "foo": {
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
    },
    "edit_mode": "UI_LOCKED",
    "format": "MULTI_TASK",
@@ -64,11 +64,11 @@ Validation OK!
  }
}

->>> $CLI bundle validate -t staging
+>>> [CLI] bundle validate -t staging
Name: override_job_cluster
Target: staging
Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_cluster/staging
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging

Validation OK!
@@ -1,5 +1,5 @@

->>> errcode $CLI bundle validate -o json -t development
+>>> errcode [CLI] bundle validate -o json -t development
Error: file ./test1.py not found


@@ -28,7 +28,7 @@
  ]
}

->>> errcode $CLI bundle validate -o json -t staging
+>>> errcode [CLI] bundle validate -o json -t staging
Error: file ./test1.py not found


@@ -63,14 +63,14 @@ Exit code: 1
  ]
}

->>> errcode $CLI bundle validate -t staging
+>>> errcode [CLI] bundle validate -t staging
Error: file ./test1.py not found

Name: override_job_tasks
Target: staging
Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/override_job_tasks/staging
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/override_job_tasks/staging

Found 1 error

@@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t dev
+>>> [CLI] bundle validate -o json -t dev
Warning: expected map, found string
  at resources.clusters.my_cluster
  in databricks.yml:6:17
@@ -13,7 +13,7 @@ Warning: expected map, found string
  }
}

->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
Warning: expected map, found string
  at resources.clusters.my_cluster
  in databricks.yml:6:17
@@ -21,7 +21,7 @@ Warning: expected map, found string
Name: merge-string-map
Target: dev
Workspace:
-  User: tester@databricks.com
-  Path: /Workspace/Users/tester@databricks.com/.bundle/merge-string-map/dev
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/merge-string-map/dev

Found 1 warning
@@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
{
  "foo": {
    "clusters": [
@@ -14,14 +14,14 @@
    ],
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/development/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/development/state/metadata.json"
    },
    "name": "job",
    "permissions": []
  }
}

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
{
  "foo": {
    "clusters": [
@@ -36,7 +36,7 @@
    ],
    "deployment": {
      "kind": "BUNDLE",
-      "metadata_file_path": "/Workspace/Users/tester@databricks.com/.bundle/override_pipeline_cluster/staging/state/metadata.json"
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/staging/state/metadata.json"
    },
    "name": "job",
    "permissions": []
@@ -1,5 +1,5 @@
bundle:
-  name: path_translation_nominal
+  name: fallback

include:
  - "resources/*.yml"
@@ -0,0 +1,67 @@
[
  {
    "job_cluster_key": "default",
    "notebook_task": {
      "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
    },
    "task_key": "notebook_example"
  },
  {
    "job_cluster_key": "default",
    "spark_python_task": {
      "python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
    },
    "task_key": "spark_python_example"
  },
  {
    "dbt_task": {
      "commands": [
        "dbt run",
        "dbt run"
      ],
      "project_directory": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/dbt_project"
    },
    "job_cluster_key": "default",
    "task_key": "dbt_example"
  },
  {
    "job_cluster_key": "default",
    "sql_task": {
      "file": {
        "path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
      },
      "warehouse_id": "cafef00d"
    },
    "task_key": "sql_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "whl": "dist/wheel1.whl"
      },
      {
        "whl": "dist/wheel2.whl"
      }
    ],
    "python_wheel_task": {
      "package_name": "my_package"
    },
    "task_key": "python_wheel_example"
  },
  {
    "job_cluster_key": "default",
    "libraries": [
      {
        "jar": "target/jar1.jar"
      },
      {
        "jar": "target/jar2.jar"
      }
    ],
    "spark_jar_task": {
      "main_class_name": "com.example.Main"
    },
    "task_key": "spark_jar_example"
  }
]
@@ -0,0 +1,22 @@
[
  {
    "file": {
      "path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file1.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook1"
    }
  },
  {
    "file": {
      "path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file2.py"
    }
  },
  {
    "notebook": {
      "path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook2"
    }
  }
]
@@ -0,0 +1,16 @@

>>> [CLI] bundle validate -t development -o json

>>> [CLI] bundle validate -t error
Error: notebook this value is overridden not found. Local notebook references are expected
to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

Name: fallback
Target: error
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/fallback/error

Found 1 error

Exit code: 1