Mirror of https://github.com/databricks/cli.git

Commit dfacb81826: Merge remote-tracking branch 'origin/main' into remove-run-as

The first hunk updates the pinned OpenAPI commit SHA (it matches the OpenAPI commit noted in the CHANGELOG entry below):

@@ -1 +1 @@
-779817ed8d63031f5ea761fbd25ee84f38feec0d
+c72c58f97b950fcb924a90ef164bcb10cfcd5ece
@@ -109,16 +109,19 @@ var {{.CamelName}}Overrides []func(
 {{- end }}
 )

+{{- $excludeFromJson := list "http-request"}}
+
 func new{{.PascalName}}() *cobra.Command {
 cmd := &cobra.Command{}

+{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName )) -}}
 {{- if .Request}}

 var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}}
 {{- if .RequestBodyField }}
 {{.CamelName}}Req.{{.RequestBodyField.PascalName}} = &{{.Service.Package.Name}}.{{.RequestBodyField.Entity.PascalName}}{}
 {{- end }}
-{{- if .CanUseJson}}
+{{- if $canUseJson}}
 var {{.CamelName}}Json flags.JsonFlag
 {{- end}}
 {{- end}}

@@ -135,7 +138,7 @@ func new{{.PascalName}}() *cobra.Command {
 {{- $request = .RequestBodyField.Entity -}}
 {{- end -}}
 {{if $request }}// TODO: short flags
-{{- if .CanUseJson}}
+{{- if $canUseJson}}
 cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`)
 {{- end}}
 {{$method := .}}

@@ -177,7 +180,7 @@ func new{{.PascalName}}() *cobra.Command {
 {{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
 {{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
 {{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
-{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and .CanUseJson (or $request.HasRequiredRequestBodyFields )) -}}
+{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 {{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}

 {{- $atleastOneArgumentWithDescription := false -}}

@@ -239,7 +242,7 @@ func new{{.PascalName}}() *cobra.Command {
 ctx := cmd.Context()
 {{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}}
 {{- if .Request }}
-{{ if .CanUseJson }}
+{{ if $canUseJson }}
 if cmd.Flags().Changed("json") {
 diags := {{.CamelName}}Json.Unmarshal(&{{.CamelName}}Req{{ if .RequestBodyField }}.{{.RequestBodyField.PascalName}}{{ end }})
 if diags.HasError() {

@@ -255,7 +258,7 @@ func new{{.PascalName}}() *cobra.Command {
 return fmt.Errorf("please provide command input in JSON format by specifying the --json flag")
 }{{- end}}
 {{- if $hasPosArgs }}
-{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
+{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }} else {
 {{- end}}
 {{- if $hasIdPrompt}}
 if len(args) == 0 {

@@ -279,9 +282,9 @@ func new{{.PascalName}}() *cobra.Command {

 {{$method := .}}
 {{- range $arg, $field := .RequiredPositionalArguments}}
-{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt)}}
+{{- template "args-scan" (dict "Arg" $arg "Field" $field "Method" $method "HasIdPrompt" $hasIdPrompt "ExcludeFromJson" $excludeFromJson)}}
 {{- end -}}
-{{- if and .CanUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
+{{- if and $canUseJson $hasSingleRequiredRequestBodyFieldWithPrompt }}
 }
 {{- end}}

@@ -392,7 +395,8 @@ func new{{.PascalName}}() *cobra.Command {
 {{- $method := .Method -}}
 {{- $arg := .Arg -}}
 {{- $hasIdPrompt := .HasIdPrompt -}}
-{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $method.CanUseJson) }}
+{{ $canUseJson := and $method.CanUseJson (not (in .ExcludeFromJson $method.KebabName)) }}
+{{- $optionalIfJsonIsUsed := and (not $hasIdPrompt) (and $field.IsRequestBodyField $canUseJson) }}
 {{- if $optionalIfJsonIsUsed }}
 if !cmd.Flags().Changed("json") {
 {{- end }}
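The template hunks above thread a new $canUseJson flag through the code generator so that a JSON-capable service can still opt individual commands (here "http-request") out of the --json flag; the exclusion list is handed to the nested "args-scan" template through the dict because text/template variables do not cross template boundaries. A minimal, self-contained Go sketch of the gating logic, assuming "list" and "in" helpers equivalent to the ones the template relies on:

package main

import (
    "os"
    "slices"
    "text/template"
)

func main() {
    // Stand-ins for the codegen's template helpers: "list" builds a slice,
    // "in" reports membership (assumed sprig-like behavior).
    funcs := template.FuncMap{
        "list": func(items ...string) []string { return items },
        "in":   func(haystack []string, needle string) bool { return slices.Contains(haystack, needle) },
    }

    // Same shape as the diff: JSON support requires CanUseJson AND absence
    // from the exclusion list keyed by the command's kebab-case name.
    tmpl := template.Must(template.New("cmd").Funcs(funcs).Parse(
        `{{- $excludeFromJson := list "http-request" -}}
{{- $canUseJson := and .CanUseJson (not (in $excludeFromJson .KebabName)) -}}
{{- if $canUseJson }}has --json flag{{ else }}no --json flag{{ end }}`))

    // Prints "no --json flag": http-request is excluded despite CanUseJson.
    _ = tmpl.Execute(os.Stdout, map[string]any{"CanUseJson": true, "KebabName": "http-request"})
}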
@@ -1,11 +1,13 @@
 cmd/account/access-control/access-control.go linguist-generated=true
 cmd/account/billable-usage/billable-usage.go linguist-generated=true
+cmd/account/budget-policy/budget-policy.go linguist-generated=true
 cmd/account/budgets/budgets.go linguist-generated=true
 cmd/account/cmd.go linguist-generated=true
 cmd/account/credentials/credentials.go linguist-generated=true
 cmd/account/csp-enablement-account/csp-enablement-account.go linguist-generated=true
 cmd/account/custom-app-integration/custom-app-integration.go linguist-generated=true
 cmd/account/disable-legacy-features/disable-legacy-features.go linguist-generated=true
+cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=true
 cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
 cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
 cmd/account/federation-policy/federation-policy.go linguist-generated=true

@@ -31,6 +33,7 @@ cmd/account/users/users.go linguist-generated=true
 cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true
 cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true
 cmd/account/workspaces/workspaces.go linguist-generated=true
+cmd/workspace/access-control/access-control.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go linguist-generated=true
 cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go linguist-generated=true
 cmd/workspace/alerts-legacy/alerts-legacy.go linguist-generated=true

@@ -74,6 +77,7 @@ cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
 cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
 cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
 cmd/workspace/jobs/jobs.go linguist-generated=true
+cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true
 cmd/workspace/lakeview/lakeview.go linguist-generated=true
 cmd/workspace/libraries/libraries.go linguist-generated=true
 cmd/workspace/metastores/metastores.go linguist-generated=true

@@ -98,11 +102,13 @@ cmd/workspace/providers/providers.go linguist-generated=true
 cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
 cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
 cmd/workspace/queries/queries.go linguist-generated=true
+cmd/workspace/query-execution/query-execution.go linguist-generated=true
 cmd/workspace/query-history/query-history.go linguist-generated=true
 cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
 cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
 cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
 cmd/workspace/recipients/recipients.go linguist-generated=true
+cmd/workspace/redash-config/redash-config.go linguist-generated=true
 cmd/workspace/registered-models/registered-models.go linguist-generated=true
 cmd/workspace/repos/repos.go linguist-generated=true
 cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true
@@ -1 +1,2 @@
 * @pietern @andrewnester @shreyas-goenka @denik
+cmd/labs @alexott @nfx
@@ -18,7 +18,7 @@ jobs:
       pull-requests: write

     steps:
-      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
+      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
         with:
           stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
           stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.
@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
        id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -23,7 +23,7 @@ jobs:
     steps:
      - name: Generate GitHub App Token
        id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
        with:
          app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
@@ -10,19 +10,65 @@ on:
 jobs:
   publish-to-winget-pkgs:
     runs-on:
-      group: databricks-protected-runner-group
-      labels: windows-server-latest
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco

     environment: release

     steps:
-      - uses: vedantmgoyal2009/winget-releaser@93fd8b606a1672ec3e5c6c3bb19426be68d1a8b0 # v2
-        with:
-          identifier: Databricks.DatabricksCLI
-          installers-regex: 'windows_.*-signed\.zip$' # Only signed Windows releases
-          token: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
-          fork-user: eng-dev-ecosystem-bot
+      - name: Checkout repository and submodules
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      # When updating the version of komac, make sure to update the checksum in the next step.
+      # Find both at https://github.com/russellbanks/Komac/releases.
+      - name: Download komac binary
+        run: |
+          curl -s -L -o $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz https://github.com/russellbanks/Komac/releases/download/v2.9.0/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz
+
+      - name: Verify komac binary
+        run: |
+          echo "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz" | sha256sum -c -
+
+      - name: Untar komac binary to temporary path
+        run: |
+          mkdir -p $RUNNER_TEMP/komac
+          tar -xzf $RUNNER_TEMP/komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz -C $RUNNER_TEMP/komac
+
+      - name: Add komac to PATH
+        run: echo "$RUNNER_TEMP/komac" >> $GITHUB_PATH
+
+      - name: Confirm komac version
+        run: komac --version

       # Use the tag from the input, or the ref name if the input is not provided.
       # The ref name is equal to the tag name when this workflow is triggered by the "sign-cli" command.
-          release-tag: ${{ inputs.tag || github.ref_name }}
+      - name: Strip "v" prefix from version
+        id: strip_version
+        run: echo "version=$(echo ${{ inputs.tag || github.ref_name }} | sed 's/^v//')" >> "$GITHUB_OUTPUT"
+
+      - name: Get URLs of signed Windows binaries
+        id: get_windows_urls
+        run: |
+          urls=$(
+            gh api https://api.github.com/repos/databricks/cli/releases/tags/${{ inputs.tag || github.ref_name }} | \
+            jq -r .assets[].browser_download_url | \
+            grep -E '_windows_.*-signed\.zip$' | \
+            tr '\n' ' '
+          )
+          if [ -z "$urls" ]; then
+            echo "No signed Windows binaries found" >&2
+            exit 1
+          fi
+          echo "urls=$urls" >> "$GITHUB_OUTPUT"
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Publish to Winget
+        run: |
+          komac update Databricks.DatabricksCLI \
+            --version ${{ steps.strip_version.outputs.version }} \
+            --submit \
+            --urls ${{ steps.get_windows_urls.outputs.urls }} \
+        env:
+          KOMAC_FORK_OWNER: eng-dev-ecosystem-bot
+          GITHUB_TOKEN: ${{ secrets.ENG_DEV_ECOSYSTEM_BOT_TOKEN }}
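The reworked workflow drops the third-party winget-releaser action in favor of an explicit download/verify/run sequence around komac, with the binary pinned by a hard-coded SHA-256. A rough Go equivalent of the "Verify komac binary" step (the digest is copied from the workflow; the file path is illustrative):

package main

import (
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "os"
)

// verifyChecksum hashes a downloaded archive and compares it against a pinned
// SHA-256, failing closed on mismatch, like: echo "<digest>  <file>" | sha256sum -c -
func verifyChecksum(path, expectedHex string) error {
    data, err := os.ReadFile(path)
    if err != nil {
        return err
    }
    sum := sha256.Sum256(data)
    if hex.EncodeToString(sum[:]) != expectedHex {
        return fmt.Errorf("checksum mismatch for %s", path)
    }
    return nil
}

func main() {
    // Illustrative local path; the digest matches the workflow's pinned value.
    if err := verifyChecksum("komac-2.9.0-x86_64-unknown-linux-gnu.tar.gz",
        "d07a12831ad5418fee715488542a98ce3c0e591d05c850dd149fe78432be8c4c"); err != nil {
        fmt.Fprintln(os.Stderr, err)
        os.Exit(1)
    }
}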
@@ -50,7 +50,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

       - name: Setup Python
         uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0

@@ -60,12 +60,6 @@ jobs:
       - name: Install uv
         uses: astral-sh/setup-uv@887a942a15af3a7626099df99e897a18d9e5ab3a # v5.1.0

-      - name: Run ruff
-        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
-        with:
-          version: "0.9.1"
-          args: "format --check"
-
       - name: Set go env
         run: |
           echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV

@@ -80,7 +74,7 @@ jobs:
       - name: Run tests with coverage
         run: make cover

-  golangci:
+  linters:
     needs: cleanups
     name: lint
     runs-on: ubuntu-latest

@@ -88,7 +82,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum

@@ -101,10 +95,15 @@ jobs:
           # Exit with status code 1 if there are differences (i.e. unformatted files)
           git diff --exit-code
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
+        uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
         with:
           version: v1.63.4
           args: --timeout=15m
+      - name: Run ruff
+        uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
+        with:
+          version: "0.9.1"
+          args: "format --check"

   validate-bundle-schema:
     needs: cleanups

@@ -117,7 +116,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum
@@ -34,7 +34,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

           # The default cache key for this action considers only the `go.sum` file.
           # We include .goreleaser.yaml here to differentiate from the cache used by the push action
@@ -26,7 +26,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod

           # The default cache key for this action considers only the `go.sum` file.
           # We include .goreleaser.yaml here to differentiate from the cache used by the push action
@@ -25,11 +25,7 @@ coverage-acceptance.txt
 __pycache__
 *.pyc

-.terraform
-.terraform.lock.hcl
-
 .vscode/launch.json
 .vscode/tasks.json

-.databricks
 .ruff_cache
CHANGELOG.md
@@ -1,5 +1,66 @@
 # Version changelog

+## [Release] Release v0.241.2
+
+This is a bugfix release to address an issue where jobs with tasks with a
+libraries section with PyPI packages could not be deployed.
+
+Bundles:
+ * Revert changes related to basename check for local libraries ([#2345](https://github.com/databricks/cli/pull/2345)).
+
+## [Release] Release v0.241.1
+
+Bundles:
+ * Fix for regression deploying resources with PyPi and Maven library types ([#2341](https://github.com/databricks/cli/pull/2341)).
+
+## [Release] Release v0.241.0
+
+Bundles:
+ * Added support to generate Git based jobs ([#2304](https://github.com/databricks/cli/pull/2304)).
+ * Added support for run_as in pipelines ([#2287](https://github.com/databricks/cli/pull/2287)).
+ * Raise an error when there are multiple local libraries with the same basename used ([#2297](https://github.com/databricks/cli/pull/2297)).
+ * Fix env variable for AzureCli local config ([#2248](https://github.com/databricks/cli/pull/2248)).
+ * Accept JSON files in includes section ([#2265](https://github.com/databricks/cli/pull/2265)).
+ * Always print warnings and errors; clean up format ([#2213](https://github.com/databricks/cli/pull/2213))
+
+API Changes:
+ * Added `databricks account budget-policy` command group.
+ * Added `databricks lakeview-embedded` command group.
+ * Added `databricks query-execution` command group.
+ * Added `databricks account enable-ip-access-lists` command group.
+ * Added `databricks redash-config` command group.
+
+OpenAPI commit c72c58f97b950fcb924a90ef164bcb10cfcd5ece (2025-02-03)
+
+Dependency updates:
+ * Upgrade to TF provider 1.65.1 ([#2328](https://github.com/databricks/cli/pull/2328)).
+ * Bump github.com/hashicorp/terraform-exec from 0.21.0 to 0.22.0 ([#2237](https://github.com/databricks/cli/pull/2237)).
+ * Bump github.com/spf13/pflag from 1.0.5 to 1.0.6 ([#2281](https://github.com/databricks/cli/pull/2281)).
+ * Bump github.com/databricks/databricks-sdk-go from 0.56.1 to 0.57.0 ([#2321](https://github.com/databricks/cli/pull/2321)).
+ * Bump golang.org/x/oauth2 from 0.25.0 to 0.26.0 ([#2322](https://github.com/databricks/cli/pull/2322)).
+ * Bump golang.org/x/term from 0.28.0 to 0.29.0 ([#2325](https://github.com/databricks/cli/pull/2325)).
+ * Bump golang.org/x/text from 0.21.0 to 0.22.0 ([#2323](https://github.com/databricks/cli/pull/2323)).
+ * Bump golang.org/x/mod from 0.22.0 to 0.23.0 ([#2324](https://github.com/databricks/cli/pull/2324)).
+
+## [Release] Release v0.240.0
+
+Bundles:
+ * Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
+ * Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
+ * Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
+ * libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
+
+API Changes:
+ * Added `databricks access-control` command group.
+ * Added `databricks serving-endpoints http-request` command.
+ * Changed `databricks serving-endpoints create` command with new required argument order.
+ * Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
+ * Changed `databricks recipients update` command return type to become non-empty.
+
+OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
+
+Dependency updates:
+ * Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
+ * Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
+
 ## [Release] Release v0.239.1

 CLI:
Makefile
@@ -1,4 +1,4 @@
-default: vendor fmt lint
+default: vendor fmt lint tidy

 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .

@@ -9,6 +9,10 @@ GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped
 lint:
 	golangci-lint run --fix

+tidy:
+	@# not part of golangci-lint, apparently
+	go mod tidy
+
 lintcheck:
 	golangci-lint run ./...

@@ -24,7 +28,7 @@ test:

 cover:
 	rm -fr ./acceptance/build/cover/
-	CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
+	VERBOSE_TEST=1 CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
 	rm -fr ./acceptance/build/cover-merged/
 	mkdir -p acceptance/build/cover-merged/
 	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/

@@ -48,12 +52,15 @@ vendor:
 schema:
 	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json

-INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+docs:
+	go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
+
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./acceptance ./integration/..." -- -parallel 4 -timeout=2h

-integration:
+integration: vendor
 	$(INTEGRATION)

-integration-short:
-	$(INTEGRATION) -short
+integration-short: vendor
+	VERBOSE_TEST=1 $(INTEGRATION) -short

-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
+.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs
NOTICE
@@ -109,3 +109,12 @@ License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
 https://github.com/BurntSushi/toml
 Copyright (c) 2013 TOML authors
 https://github.com/BurntSushi/toml/blob/master/COPYING
+
+dario.cat/mergo
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+https://github.com/darccio/mergo/blob/master/LICENSE
+
+https://github.com/gorilla/mux
+Copyright (c) 2023 The Gorilla Authors. All rights reserved.
+https://github.com/gorilla/mux/blob/main/LICENSE
@@ -0,0 +1 @@
+build
@@ -17,3 +17,5 @@ For more complex tests one can also use:
 - `errcode` helper: if the command fails with non-zero code, it appends `Exit code: N` to the output but returns success to caller (bash), allowing continuation of script.
 - `trace` helper: prints the arguments before executing the command.
 - custom output files: redirect output to custom file (it must start with `out`), e.g. `$CLI bundle validate > out.txt 2> out.error.txt`.
+
+See [selftest](./selftest) for a toy test.
@@ -2,6 +2,7 @@ package acceptance_test

 import (
 	"context"
+	"encoding/json"
 	"errors"
 	"flag"
 	"fmt"
@@ -9,21 +10,31 @@ import (
 	"os"
 	"os/exec"
 	"path/filepath"
+	"regexp"
 	"runtime"
 	"slices"
 	"sort"
 	"strings"
 	"testing"
 	"time"
+	"unicode/utf8"
+
+	"github.com/google/uuid"

 	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/cli/libs/testdiff"
+	"github.com/databricks/cli/libs/testserver"
 	"github.com/databricks/databricks-sdk-go"
+	"github.com/databricks/databricks-sdk-go/service/iam"
 	"github.com/stretchr/testify/require"
 )

-var KeepTmp bool
+var (
+	KeepTmp     bool
+	NoRepl      bool
+	VerboseTest bool = os.Getenv("VERBOSE_TEST") != ""
+)

 // In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
 // Then install your breakpoints and click "debug test" near TestAccept in VSCODE.
@@ -38,12 +49,16 @@ var InprocessMode bool
 func init() {
 	flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
 	flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
+	flag.BoolVar(&NoRepl, "norepl", false, "Do not apply any replacements (for debugging)")
 }

 const (
 	EntryPointScript = "script"
 	CleanupScript    = "script.cleanup"
 	PrepareScript    = "script.prepare"
+	MaxFileSize      = 100_000
+	// Filename to save replacements to (used by diff.py)
+	ReplsFile = "repls.json"
 )

 var Scripts = map[string]bool{
@@ -52,6 +67,10 @@ var Scripts = map[string]bool{
 	PrepareScript: true,
 }

+var Ignored = map[string]bool{
+	ReplsFile: true,
+}
+
 func TestAccept(t *testing.T) {
 	testAccept(t, InprocessMode, SingleTest)
 }
@@ -60,12 +79,8 @@ func TestInprocessMode(t *testing.T) {
 	if InprocessMode {
 		t.Skip("Already tested by TestAccept")
 	}
-	if runtime.GOOS == "windows" {
-		// - catalogs A catalog is the first layer of Unity Catalog’s three-level namespace.
-		// + catalogs A catalog is the first layer of Unity Catalog<6F>s three-level namespace.
-		t.Skip("Fails on CI on unicode characters")
-	}
-	require.NotZero(t, testAccept(t, true, "help"))
+	require.Equal(t, 1, testAccept(t, true, "selftest/basic"))
+	require.Equal(t, 1, testAccept(t, true, "selftest/server"))
 }

 func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
@@ -73,6 +88,11 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	cwd, err := os.Getwd()
 	require.NoError(t, err)

+	buildDir := filepath.Join(cwd, "build", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
+
+	// Download terraform and provider and create config; this also creates build directory.
+	RunCommand(t, []string{"python3", filepath.Join(cwd, "install_terraform.py"), "--targetdir", buildDir}, ".")
+
 	coverDir := os.Getenv("CLI_GOCOVERDIR")

 	if coverDir != "" {
|
||||||
t.Setenv("CMD_SERVER_URL", cmdServer.URL)
|
t.Setenv("CMD_SERVER_URL", cmdServer.URL)
|
||||||
execPath = filepath.Join(cwd, "bin", "callserver.py")
|
execPath = filepath.Join(cwd, "bin", "callserver.py")
|
||||||
} else {
|
} else {
|
||||||
execPath = BuildCLI(t, cwd, coverDir)
|
execPath = BuildCLI(t, buildDir, coverDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
t.Setenv("CLI", execPath)
|
t.Setenv("CLI", execPath)
|
||||||
repls.SetPath(execPath, "$CLI")
|
repls.SetPath(execPath, "[CLI]")
|
||||||
|
|
||||||
// Make helper scripts available
|
// Make helper scripts available
|
||||||
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
|
t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
|
||||||
|
|
||||||
tempHomeDir := t.TempDir()
|
tempHomeDir := t.TempDir()
|
||||||
repls.SetPath(tempHomeDir, "$TMPHOME")
|
repls.SetPath(tempHomeDir, "[TMPHOME]")
|
||||||
t.Logf("$TMPHOME=%v", tempHomeDir)
|
t.Logf("$TMPHOME=%v", tempHomeDir)
|
||||||
|
|
||||||
// Prevent CLI from downloading terraform in each test:
|
// Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
|
||||||
t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
|
uvCache := getUVDefaultCacheDir(t)
|
||||||
|
t.Setenv("UV_CACHE_DIR", uvCache)
|
||||||
|
|
||||||
ctx := context.Background()
|
|
||||||
cloudEnv := os.Getenv("CLOUD_ENV")
|
cloudEnv := os.Getenv("CLOUD_ENV")
|
||||||
|
|
||||||
if cloudEnv == "" {
|
if cloudEnv == "" {
|
||||||
server := StartServer(t)
|
defaultServer := testserver.New(t)
|
||||||
AddHandlers(server)
|
AddHandlers(defaultServer)
|
||||||
// Redirect API access to local server:
|
t.Setenv("DATABRICKS_DEFAULT_HOST", defaultServer.URL)
|
||||||
t.Setenv("DATABRICKS_HOST", server.URL)
|
|
||||||
t.Setenv("DATABRICKS_TOKEN", "dapi1234")
|
|
||||||
|
|
||||||
homeDir := t.TempDir()
|
homeDir := t.TempDir()
|
||||||
// Do not read user's ~/.databrickscfg
|
// Do not read user's ~/.databrickscfg
|
||||||
t.Setenv(env.HomeEnvVar(), homeDir)
|
t.Setenv(env.HomeEnvVar(), homeDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
workspaceClient, err := databricks.NewWorkspaceClient()
|
terraformrcPath := filepath.Join(buildDir, ".terraformrc")
|
||||||
require.NoError(t, err)
|
t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
|
||||||
|
t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
|
||||||
|
repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
|
||||||
|
|
||||||
user, err := workspaceClient.CurrentUser.Me(ctx)
|
terraformExecPath := filepath.Join(buildDir, "terraform")
|
||||||
require.NoError(t, err)
|
if runtime.GOOS == "windows" {
|
||||||
require.NotNil(t, user)
|
terraformExecPath += ".exe"
|
||||||
testdiff.PrepareReplacementsUser(t, &repls, *user)
|
}
|
||||||
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
|
t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
|
||||||
testdiff.PrepareReplacementsUUID(t, &repls)
|
t.Setenv("TERRAFORM", terraformExecPath)
|
||||||
|
repls.SetPath(terraformExecPath, "[TERRAFORM]")
|
||||||
|
|
||||||
|
// do it last so that full paths match first:
|
||||||
|
repls.SetPath(buildDir, "[BUILD_DIR]")
|
||||||
|
|
||||||
|
testdiff.PrepareReplacementsDevVersion(t, &repls)
|
||||||
|
testdiff.PrepareReplacementSdkVersion(t, &repls)
|
||||||
|
testdiff.PrepareReplacementsGoVersion(t, &repls)
|
||||||
|
|
||||||
|
repls.SetPath(cwd, "[TESTROOT]")
|
||||||
|
|
||||||
|
repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})
|
||||||
|
|
||||||
testDirs := getTests(t)
|
testDirs := getTests(t)
|
||||||
require.NotEmpty(t, testDirs)
|
require.NotEmpty(t, testDirs)
|
||||||
|
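Note the placeholder convention change from $CLI/$TMPHOME to [CLI]/[TMPHOME]: bracketed tokens cannot be confused with shell variables in recorded output. A stand-alone sketch of the replacement mechanism, with illustrative names (the real logic lives in libs/testdiff):

package main

import (
    "fmt"
    "regexp"
    "strings"
)

// replacement pairs a pattern with its bracketed placeholder.
type replacement struct {
    pattern *regexp.Regexp
    repl    string
}

// normalize applies literal path replacements first (register the most
// specific paths first, as the diff's "do it last" comment suggests),
// then pattern-based ones.
func normalize(out string, paths [][2]string, repls []replacement) string {
    for _, p := range paths {
        out = strings.ReplaceAll(out, p[0], p[1]) // e.g. build dir -> [BUILD_DIR]
    }
    for _, r := range repls {
        out = r.pattern.ReplaceAllString(out, r.repl) // e.g. dbapi... -> [DATABRICKS_TOKEN]
    }
    return out
}

func main() {
    out := "token=dbapi0123abcd cli=/home/ci/build/linux_amd64/databricks"
    fmt.Println(normalize(out,
        [][2]string{{"/home/ci/build/linux_amd64", "[BUILD_DIR]"}},
        []replacement{{regexp.MustCompile("dbapi[0-9a-f]+"), "[DATABRICKS_TOKEN]"}},
    ))
    // Prints: token=[DATABRICKS_TOKEN] cli=[BUILD_DIR]/databricks
}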
@@ -141,8 +173,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	}

 	for _, dir := range testDirs {
-		testName := strings.ReplaceAll(dir, "\\", "/")
-		t.Run(testName, func(t *testing.T) {
+		t.Run(dir, func(t *testing.T) {
 			if !InprocessMode {
 				t.Parallel()
 			}
@@ -164,7 +195,8 @@ func getTests(t *testing.T) []string {
 		name := filepath.Base(path)
 		if name == EntryPointScript {
 			// Presence of 'script' marks a test case in this directory
-			testDirs = append(testDirs, filepath.Dir(path))
+			testName := filepath.ToSlash(filepath.Dir(path))
+			testDirs = append(testDirs, testName)
 		}
 		return nil
 	})
@@ -182,6 +214,11 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 		t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
 	}

+	cloudEnv := os.Getenv("CLOUD_ENV")
+	if config.LocalOnly && cloudEnv != "" {
+		t.Skipf("Disabled via LocalOnly setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
+	}
+
 	var tmpDir string
 	var err error
 	if KeepTmp {
@@ -194,7 +231,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 		tmpDir = t.TempDir()
 	}

-	repls.SetPathWithParents(tmpDir, "$TMPDIR")
+	repls.SetPathWithParents(tmpDir, "[TMPDIR]")

 	scriptContents := readMergedScriptContents(t, dir)
 	testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)
|
||||||
|
|
||||||
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
|
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
|
||||||
cmd := exec.Command(args[0], args[1:]...)
|
cmd := exec.Command(args[0], args[1:]...)
|
||||||
|
cmd.Env = os.Environ()
|
||||||
|
|
||||||
|
var workspaceClient *databricks.WorkspaceClient
|
||||||
|
var user iam.User
|
||||||
|
|
||||||
|
// Start a new server with a custom configuration if the acceptance test
|
||||||
|
// specifies a custom server stubs.
|
||||||
|
var server *testserver.Server
|
||||||
|
|
||||||
|
if cloudEnv == "" {
|
||||||
|
// Start a new server for this test if either:
|
||||||
|
// 1. A custom server spec is defined in the test configuration.
|
||||||
|
// 2. The test is configured to record requests and assert on them. We need
|
||||||
|
// a duplicate of the default server to record requests because the default
|
||||||
|
// server otherwise is a shared resource.
|
||||||
|
|
||||||
|
databricksLocalHost := os.Getenv("DATABRICKS_DEFAULT_HOST")
|
||||||
|
|
||||||
|
if len(config.Server) > 0 || config.RecordRequests {
|
||||||
|
server = testserver.New(t)
|
||||||
|
server.RecordRequests = config.RecordRequests
|
||||||
|
server.IncludeRequestHeaders = config.IncludeRequestHeaders
|
||||||
|
|
||||||
|
// We want later stubs takes precedence, because then leaf configs take precedence over parent directory configs
|
||||||
|
// In gorilla/mux earlier handlers take precedence, so we need to reverse the order
|
||||||
|
slices.Reverse(config.Server)
|
||||||
|
|
||||||
|
for _, stub := range config.Server {
|
||||||
|
require.NotEmpty(t, stub.Pattern)
|
||||||
|
items := strings.Split(stub.Pattern, " ")
|
||||||
|
require.Len(t, items, 2)
|
||||||
|
server.Handle(items[0], items[1], func(req testserver.Request) any {
|
||||||
|
return stub.Response
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// The earliest handlers take precedence, add default handlers last
|
||||||
|
AddHandlers(server)
|
||||||
|
databricksLocalHost = server.URL
|
||||||
|
}
|
||||||
|
|
||||||
|
// Each local test should use a new token that will result into a new fake workspace,
|
||||||
|
// so that test don't interfere with each other.
|
||||||
|
tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
|
||||||
|
config := databricks.Config{
|
||||||
|
Host: databricksLocalHost,
|
||||||
|
Token: "dbapi" + tokenSuffix,
|
||||||
|
}
|
||||||
|
workspaceClient, err = databricks.NewWorkspaceClient(&config)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+config.Host)
|
||||||
|
cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+config.Token)
|
||||||
|
|
||||||
|
// For the purposes of replacements, use testUser.
|
||||||
|
// Note, users might have overriden /api/2.0/preview/scim/v2/Me but that should not affect the replacement:
|
||||||
|
user = testUser
|
||||||
|
} else {
|
||||||
|
// Use whatever authentication mechanism is configured by the test runner.
|
||||||
|
workspaceClient, err = databricks.NewWorkspaceClient(&databricks.Config{})
|
||||||
|
require.NoError(t, err)
|
||||||
|
pUser, err := workspaceClient.CurrentUser.Me(context.Background())
|
||||||
|
require.NoError(t, err, "Failed to get current user")
|
||||||
|
user = *pUser
|
||||||
|
}
|
||||||
|
|
||||||
|
testdiff.PrepareReplacementsUser(t, &repls, user)
|
||||||
|
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
|
||||||
|
|
||||||
|
// Must be added PrepareReplacementsUser, otherwise conflicts with [USERNAME]
|
||||||
|
testdiff.PrepareReplacementsUUID(t, &repls)
|
||||||
|
|
||||||
|
// User replacements come last:
|
||||||
|
repls.Repls = append(repls.Repls, config.Repls...)
|
||||||
|
|
||||||
|
// Save replacements to temp test directory so that it can be read by diff.py
|
||||||
|
replsJson, err := json.MarshalIndent(repls.Repls, "", " ")
|
||||||
|
require.NoError(t, err)
|
||||||
|
testutil.WriteFile(t, filepath.Join(tmpDir, ReplsFile), string(replsJson))
|
||||||
|
|
||||||
if coverDir != "" {
|
if coverDir != "" {
|
||||||
// Creating individual coverage directory for each test, because writing to the same one
|
// Creating individual coverage directory for each test, because writing to the same one
|
||||||
// results in sporadic failures like this one (only if tests are running in parallel):
|
// results in sporadic failures like this one (only if tests are running in parallel):
|
||||||
|
@ -213,9 +330,13 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
|
||||||
coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
|
coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
|
||||||
err := os.MkdirAll(coverDir, os.ModePerm)
|
err := os.MkdirAll(coverDir, os.ModePerm)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
cmd.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
|
cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
absDir, err := filepath.Abs(dir)
|
||||||
|
require.NoError(t, err)
|
||||||
|
cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
|
||||||
|
|
||||||
// Write combined output to a file
|
// Write combined output to a file
|
||||||
out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
|
out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
@ -224,17 +345,39 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
|
||||||
cmd.Dir = tmpDir
|
cmd.Dir = tmpDir
|
||||||
err = cmd.Run()
|
err = cmd.Run()
|
||||||
|
|
||||||
|
// Write the requests made to the server to a output file if the test is
|
||||||
|
// configured to record requests.
|
||||||
|
if config.RecordRequests {
|
||||||
|
f, err := os.OpenFile(filepath.Join(tmpDir, "out.requests.txt"), os.O_CREATE|os.O_WRONLY, 0o644)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
for _, req := range server.Requests {
|
||||||
|
reqJson, err := json.MarshalIndent(req, "", " ")
|
||||||
|
require.NoErrorf(t, err, "Failed to indent: %#v", req)
|
||||||
|
|
||||||
|
reqJsonWithRepls := repls.Replace(string(reqJson))
|
||||||
|
_, err = f.WriteString(reqJsonWithRepls + "\n")
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = f.Close()
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
// Include exit code in output (if non-zero)
|
// Include exit code in output (if non-zero)
|
||||||
formatOutput(out, err)
|
formatOutput(out, err)
|
||||||
require.NoError(t, out.Close())
|
require.NoError(t, out.Close())
|
||||||
|
|
||||||
|
printedRepls := false
|
||||||
|
|
||||||
// Compare expected outputs
|
// Compare expected outputs
|
||||||
for relPath := range outputs {
|
for relPath := range outputs {
|
||||||
doComparison(t, repls, dir, tmpDir, relPath)
|
doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Make sure there are not unaccounted for new files
|
// Make sure there are not unaccounted for new files
|
||||||
files := ListDir(t, tmpDir)
|
files := ListDir(t, tmpDir)
|
||||||
|
unexpected := []string{}
|
||||||
for _, relPath := range files {
|
for _, relPath := range files {
|
||||||
if _, ok := inputs[relPath]; ok {
|
if _, ok := inputs[relPath]; ok {
|
||||||
continue
|
continue
|
||||||
|
@@ -242,35 +385,44 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 		if _, ok := outputs[relPath]; ok {
 			continue
 		}
+		if _, ok := Ignored[relPath]; ok {
+			continue
+		}
+		unexpected = append(unexpected, relPath)
 		if strings.HasPrefix(relPath, "out") {
 			// We have a new file starting with "out"
 			// Show the contents & support overwrite mode for it:
-			doComparison(t, repls, dir, tmpDir, relPath)
+			doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
 		}
 	}
+
+	if len(unexpected) > 0 {
+		t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
+	}
 }

-func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string) {
+func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
 	pathRef := filepath.Join(dirRef, relPath)
 	pathNew := filepath.Join(dirNew, relPath)
-	bufRef, okRef := readIfExists(t, pathRef)
-	bufNew, okNew := readIfExists(t, pathNew)
+	bufRef, okRef := tryReading(t, pathRef)
+	bufNew, okNew := tryReading(t, pathNew)
 	if !okRef && !okNew {
-		t.Errorf("Both files are missing: %s, %s", pathRef, pathNew)
+		t.Errorf("Both files are missing or have errors: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
 		return
 	}

-	valueRef := testdiff.NormalizeNewlines(string(bufRef))
-	valueNew := testdiff.NormalizeNewlines(string(bufNew))
+	valueRef := testdiff.NormalizeNewlines(bufRef)
+	valueNew := testdiff.NormalizeNewlines(bufNew)

 	// Apply replacements to the new value only.
 	// The reference value is stored after applying replacements.
+	if !NoRepl {
 	valueNew = repls.Replace(valueNew)
+	}

 	// The test did not produce an expected output file.
 	if okRef && !okNew {
 		t.Errorf("Missing output file: %s", relPath)
-		testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
 		if testdiff.OverwriteMode {
 			t.Logf("Removing output file: %s", relPath)
 			require.NoError(t, os.Remove(pathRef))
@@ -280,7 +432,7 @@ func doComparison

 	// The test produced an unexpected output file.
 	if !okRef && okNew {
-		t.Errorf("Unexpected output file: %s", relPath)
+		t.Errorf("Unexpected output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
 		testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
 		if testdiff.OverwriteMode {
 			t.Logf("Writing output file: %s", relPath)
@@ -295,6 +447,15 @@ func doComparison
 		t.Logf("Overwriting existing output file: %s", relPath)
 		testutil.WriteFile(t, pathRef, valueNew)
 	}
+
+	if VerboseTest && !equal && printedRepls != nil && !*printedRepls {
+		*printedRepls = true
+		var items []string
+		for _, item := range repls.Repls {
+			items = append(items, fmt.Sprintf("REPL %s => %s", item.Old, item.New))
+		}
+		t.Log("Available replacements:\n" + strings.Join(items, "\n"))
+	}
 }

 // Returns combined script.prepare (root) + script.prepare (parent) + ... + script + ... + script.cleanup (parent) + ...
@@ -310,14 +471,14 @@ func readMergedScriptContents(t *testing.T, dir string) string {
 	cleanups := []string{}

 	for {
-		x, ok := readIfExists(t, filepath.Join(dir, CleanupScript))
+		x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
 		if ok {
-			cleanups = append(cleanups, string(x))
+			cleanups = append(cleanups, x)
 		}

-		x, ok = readIfExists(t, filepath.Join(dir, PrepareScript))
+		x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
 		if ok {
-			prepares = append(prepares, string(x))
+			prepares = append(prepares, x)
 		}

 		if dir == "" || dir == "." {
@ -334,13 +495,12 @@ func readMergedScriptContents(t *testing.T, dir string) string {
|
||||||
return strings.Join(prepares, "\n")
|
return strings.Join(prepares, "\n")
|
||||||
}
|
}
|
||||||
|
|
||||||
func BuildCLI(t *testing.T, cwd, coverDir string) string {
|
func BuildCLI(t *testing.T, buildDir, coverDir string) string {
|
||||||
execPath := filepath.Join(cwd, "build", "databricks")
|
execPath := filepath.Join(buildDir, "databricks")
|
||||||
if runtime.GOOS == "windows" {
|
if runtime.GOOS == "windows" {
|
||||||
execPath += ".exe"
|
execPath += ".exe"
|
||||||
}
|
}
|
||||||
|
|
||||||
start := time.Now()
|
|
||||||
args := []string{
|
args := []string{
|
||||||
"go", "build",
|
"go", "build",
|
||||||
"-mod", "vendor",
|
"-mod", "vendor",
|
||||||
|
@ -358,20 +518,7 @@ func BuildCLI(t *testing.T, cwd, coverDir string) string {
|
||||||
args = append(args, "-buildvcs=false")
|
args = append(args, "-buildvcs=false")
|
||||||
}
|
}
|
||||||
|
|
||||||
cmd := exec.Command(args[0], args[1:]...)
|
RunCommand(t, args, "..")
|
||||||
cmd.Dir = ".."
|
|
||||||
out, err := cmd.CombinedOutput()
|
|
||||||
elapsed := time.Since(start)
|
|
||||||
t.Logf("%s took %s", args, elapsed)
|
|
||||||
require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
|
|
||||||
if len(out) > 0 {
|
|
||||||
t.Logf("go build output: %s: %s", args, out)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Quick check + warm up cache:
|
|
||||||
cmd = exec.Command(execPath, "--version")
|
|
||||||
out, err = cmd.CombinedOutput()
|
|
||||||
require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
|
|
||||||
return execPath
|
return execPath
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -404,16 +551,33 @@ func formatOutput(w io.Writer, err error) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
-func readIfExists(t *testing.T, path string) ([]byte, bool) {
+func tryReading(t *testing.T, path string) (string, bool) {
-	data, err := os.ReadFile(path)
+	info, err := os.Stat(path)
-	if err == nil {
+	if err != nil {
-		return data, true
+		if !errors.Is(err, os.ErrNotExist) {
+			t.Errorf("%s: %s", path, err)
+		}
+		return "", false
 	}

-	if !errors.Is(err, os.ErrNotExist) {
+	if info.Size() > MaxFileSize {
-		t.Fatalf("%s: %s", path, err)
+		t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+		return "", false
 	}
-	return []byte{}, false
+
+	data, err := os.ReadFile(path)
+	if err != nil {
+		// already checked ErrNotExist above
+		t.Errorf("%s: %s", path, err)
+		return "", false
+	}
+
+	if !utf8.Valid(data) {
+		t.Errorf("%s: not valid utf-8", path)
+		return "", false
+	}
+
+	return string(data), true
 }

 func CopyDir(src, dst string, inputs, outputs map[string]bool) error {

@ -479,3 +643,30 @@ func ListDir(t *testing.T, src string) []string {
 	}
 	return files
 }

+func getUVDefaultCacheDir(t *testing.T) string {
+	// According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
+	// the default cache directory is
+	// "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
+	cacheDir, err := os.UserCacheDir()
+	require.NoError(t, err)
+	if runtime.GOOS == "windows" {
+		return cacheDir + "\\uv\\cache"
+	} else {
+		return cacheDir + "/uv"
+	}
+}
+
+func RunCommand(t *testing.T, args []string, dir string) {
+	start := time.Now()
+	cmd := exec.Command(args[0], args[1:]...)
+	cmd.Dir = dir
+	out, err := cmd.CombinedOutput()
+	elapsed := time.Since(start)
+	t.Logf("%s took %s", args, elapsed)
+
+	require.NoError(t, err, "%s failed: %s\n%s", args, err, out)
+	if len(out) > 0 {
+		t.Logf("%s output: %s", args, out)
+	}
+}
@ -0,0 +1,5 @@
[DEFAULT]
host = $DATABRICKS_HOST

[profile_name]
host = https://test@non-existing-subdomain.databricks.com
@ -0,0 +1,14 @@
bundle:
  name: test-auth

workspace:
  host: $DATABRICKS_HOST

targets:
  dev:
    default: true
    workspace:
      host: $DATABRICKS_HOST
  prod:
    workspace:
      host: https://bar.com
@ -0,0 +1,32 @@

=== Inside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"

=== Inside the bundle, target flags
>>> errcode [CLI] current-user me -t dev
"[USERNAME]"

=== Inside the bundle, target and matching profile
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"

=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]

Exit code: 1

=== Inside the bundle, target and not matching profile
>>> errcode [CLI] current-user me -t dev -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]

Exit code: 1

=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"

=== Outside the bundle, profile flag
>>> errcode [CLI] current-user me -p profile_name
"[USERNAME]"
@ -0,0 +1,30 @@
# Replace placeholder with an actual host URL
envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
envsubst < .databrickscfg > out && mv out .databrickscfg
export DATABRICKS_CONFIG_FILE=.databrickscfg

host=$DATABRICKS_HOST
unset DATABRICKS_HOST

title "Inside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName

title "Inside the bundle, target flags"
trace errcode $CLI current-user me -t dev | jq .userName

title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName

title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
trace errcode $CLI current-user me -p profile_name | jq .userName

title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name

cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"
trace errcode $CLI current-user me | jq .userName

title "Outside the bundle, profile flag"
trace errcode $CLI current-user me -p profile_name | jq .userName
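Not part of the commit: the envsubst steps above only expand $DATABRICKS_HOST inside the copied files. A rough Python equivalent of one substitution, with a hypothetical host value:

# Illustration only: what `envsubst < databricks.yml > out.yml` does.
import os

os.environ["DATABRICKS_HOST"] = "https://example.cloud.databricks.com"  # hypothetical
with open("databricks.yml") as f:
    text = f.read()
with open("out.yml", "w") as f:
    f.write(os.path.expandvars(text))  # $DATABRICKS_HOST -> the value above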
@ -0,0 +1,12 @@
Badness = "When the -p flag is used inside the bundle folder for any CLI command, the CLI uses the bundle host anyway instead of the profile one"

# Some of the clouds have the DATABRICKS_HOST variable set up without the https:// prefix
# As a result, the output is replaced with the DATABRICKS_URL variable instead of DATABRICKS_HOST
# This is a workaround to replace DATABRICKS_URL with DATABRICKS_HOST
[[Repls]]
Old='DATABRICKS_HOST'
New='DATABRICKS_TARGET'

[[Repls]]
Old='DATABRICKS_URL'
New='DATABRICKS_TARGET'
@ -0,0 +1,12 @@
{
  "headers": {
    "Authorization": [
      "Basic [ENCODED_AUTH]"
    ],
    "User-Agent": [
      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/basic"
    ]
  },
  "method": "GET",
  "path": "/api/2.0/preview/scim/v2/Me"
}

@ -0,0 +1,4 @@
{
  "id":"[USERID]",
  "userName":"[USERNAME]"
}

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""

export DATABRICKS_USERNAME=username
export DATABRICKS_PASSWORD=password

$CLI current-user me
@ -0,0 +1,4 @@
# "username:password" in base64 is dXNlcm5hbWU6cGFzc3dvcmQ=, expect to see this in Authorization header
[[Repls]]
Old = "dXNlcm5hbWU6cGFzc3dvcmQ="
New = "[ENCODED_AUTH]"
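Not part of the commit: the base64 value claimed in the comment above can be verified in one line:

# Sanity check (illustration only) for the encoded value above.
import base64

print(base64.b64encode(b"username:password").decode())  # -> dXNlcm5hbWU6cGFzc3dvcmQ=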
@ -0,0 +1,34 @@
{
  "headers": {
    "User-Agent": [
      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
    ]
  },
  "method": "GET",
  "path": "/oidc/.well-known/oauth-authorization-server"
}
{
  "headers": {
    "Authorization": [
      "Basic [ENCODED_AUTH]"
    ],
    "User-Agent": [
      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
    ]
  },
  "method": "POST",
  "path": "/oidc/v1/token",
  "raw_body": "grant_type=client_credentials\u0026scope=all-apis"
}
{
  "headers": {
    "Authorization": [
      "Bearer oauth-token"
    ],
    "User-Agent": [
      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/oauth-m2m"
    ]
  },
  "method": "GET",
  "path": "/api/2.0/preview/scim/v2/Me"
}

@ -0,0 +1,4 @@
{
  "id":"[USERID]",
  "userName":"[USERNAME]"
}

@ -0,0 +1,8 @@
# Unset the token which is configured by default
# in acceptance tests
export DATABRICKS_TOKEN=""

export DATABRICKS_CLIENT_ID=client_id
export DATABRICKS_CLIENT_SECRET=client_secret

$CLI current-user me
@ -0,0 +1,5 @@
# "client_id:client_secret" in base64 is Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=, expect to
# see this in Authorization header
[[Repls]]
Old = "Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="
New = "[ENCODED_AUTH]"
@ -0,0 +1,12 @@
{
  "headers": {
    "Authorization": [
      "Bearer dapi1234"
    ],
    "User-Agent": [
      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/pat"
    ]
  },
  "method": "GET",
  "path": "/api/2.0/preview/scim/v2/Me"
}

@ -0,0 +1,4 @@
{
  "id":"[USERID]",
  "userName":"[USERNAME]"
}

@ -0,0 +1,3 @@
export DATABRICKS_TOKEN=dapi1234

$CLI current-user me
@ -0,0 +1,20 @@
LocalOnly = true

RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]

[[Repls]]
Old = '(linux|darwin|windows)'
New = '[OS]'

[[Repls]]
Old = " upstream/[A-Za-z0-9.-]+"
New = ""

[[Repls]]
Old = " upstream-version/[A-Za-z0-9.-]+"
New = ""

[[Repls]]
Old = " cicd/[A-Za-z0-9.-]+"
New = ""
@ -0,0 +1,56 @@
#!/usr/bin/env python3
"""This script implements "diff -r -U2 dir1 dir2" but applies replacements first"""

import sys
import difflib
import json
import re
from pathlib import Path


def replaceAll(patterns, s):
    for comp, new in patterns:
        s = comp.sub(new, s)
    return s


def main():
    d1, d2 = sys.argv[1:]
    d1, d2 = Path(d1), Path(d2)

    with open("repls.json") as f:
        repls = json.load(f)

    patterns = []
    for r in repls:
        try:
            c = re.compile(r["Old"])
            patterns.append((c, r["New"]))
        except re.error as e:
            print(f"Regex error for pattern {r}: {e}", file=sys.stderr)

    files1 = [str(p.relative_to(d1)) for p in d1.rglob("*") if p.is_file()]
    files2 = [str(p.relative_to(d2)) for p in d2.rglob("*") if p.is_file()]

    set1 = set(files1)
    set2 = set(files2)

    for f in sorted(set1 | set2):
        p1 = d1 / f
        p2 = d2 / f
        if f not in set2:
            print(f"Only in {d1}: {f}")
        elif f not in set1:
            print(f"Only in {d2}: {f}")
        else:
            a = [replaceAll(patterns, x) for x in p1.read_text().splitlines(True)]
            b = [replaceAll(patterns, x) for x in p2.read_text().splitlines(True)]
            if a != b:
                p1_str = p1.as_posix()
                p2_str = p2.as_posix()
                for line in difflib.unified_diff(a, b, p1_str, p2_str, "", "", 2):
                    print(line, end="")


if __name__ == "__main__":
    main()
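Not part of the commit: a minimal invocation sketch for the script above, assuming it is saved as diff.py (the real filename is not visible here) next to the repls.json it reads:

# Hypothetical usage of the replacement-aware diff script above.
import json
import subprocess

with open("repls.json", "w") as f:
    json.dump([{"Old": r"pid=[0-9]+", "New": "pid=12345"}], f)

subprocess.run(["python3", "diff.py", "out_expected", "out_actual"], check=False)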
@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in text file. Similar to 'sort' but no dependence on locale or presence of 'sort' in PATH.
"""

import sys

lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))

@ -1 +0,0 @@
-databricks
@ -0,0 +1,2 @@
bundle:
  name: debug

@ -0,0 +1,15 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
@ -0,0 +1,93 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
10:07:59 Debug: Apply pid=12345 mutator=load
10:07:59 Info: Phase: load pid=12345 mutator=load
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
10:07:59 Debug: Apply pid=12345 mutator=initialize
10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
< "id": "[USERID]",
< "userName": "[USERNAME]"
< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
< {
< "message": "Workspace path not found"
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
< "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
10:07:59 Info: completed execution pid=12345 exit_code=0
@ -0,0 +1,7 @@
Name: debug
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/debug/default

Validation OK!

@ -0,0 +1,4 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt
@ -0,0 +1,18 @@
LocalOnly = true

[[Repls]]
# The keys are unsorted and also vary per OS
Old = 'Environment variables for Terraform: ([A-Z_ ,]+) '
New = 'Environment variables for Terraform: ...redacted... '

[[Repls]]
Old = 'pid=[0-9]+'
New = 'pid=12345'

[[Repls]]
Old = '\d\d:\d\d:\d\d'
New = '10:07:59'

[[Repls]]
Old = '\\'
New = '/'
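Not part of the commit: a small illustration of how the pid and timestamp rules above normalize a log line (regexes taken from the Repls entries; the sample line is hypothetical):

# Illustration only: apply the pid/timestamp Repls from the TOML above.
import re

line = "10:23:45 Debug: Apply pid=48121 mutator=load"
line = re.sub(r"pid=[0-9]+", "pid=12345", line)
line = re.sub(r"\d\d:\d\d:\d\d", "10:07:59", line)
print(line)  # 10:07:59 Debug: Apply pid=12345 mutator=load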
@ -0,0 +1,2 @@
bundle:
  name: git_job

@ -0,0 +1,17 @@
resources:
  jobs:
    out:
      name: gitjob
      tasks:
        - task_key: test_task
          notebook_task:
            notebook_path: some/test/notebook.py
        - task_key: test_task_2
          notebook_task:
            notebook_path: /Workspace/Users/foo@bar.com/some/test/notebook.py
            source: WORKSPACE
      git_source:
        git_branch: main
        git_commit: abcdef
        git_provider: github
        git_url: https://git.databricks.com

@ -0,0 +1,2 @@
Job is using Git source, skipping downloading files
Job configuration successfully saved to out.job.yml

@ -0,0 +1 @@
$CLI bundle generate job --existing-job-id 1234 --config-dir . --key out
@ -0,0 +1,33 @@
LocalOnly = true # This test needs to run against stubbed Databricks API

[[Server]]
Pattern = "GET /api/2.1/jobs/get"
Response.Body = '''
{
  "job_id": 11223344,
  "settings": {
    "name": "gitjob",
    "git_source": {
      "git_url": "https://git.databricks.com",
      "git_provider": "github",
      "git_branch": "main",
      "git_commit": "abcdef"
    },
    "tasks": [
      {
        "task_key": "test_task",
        "notebook_task": {
          "notebook_path": "some/test/notebook.py"
        }
      },
      {
        "task_key": "test_task_2",
        "notebook_task": {
          "source": "WORKSPACE",
          "notebook_path": "/Workspace/Users/foo@bar.com/some/test/notebook.py"
        }
      }
    ]
  }
}
'''
@ -0,0 +1,2 @@
bundle:
  name: git-permerror

@ -0,0 +1,81 @@
=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git

>>> [CLI] bundle validate
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied

Name: git-permerror
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/git-permerror/default

Found 1 error

Exit code: 1

>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}


=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git/HEAD

>>> [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to load current branch: open HEAD: permission denied
Warn: failed to load latest commit: open HEAD: permission denied
{
  "bundle_root_path": "."
}


=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.

>>> chmod 000 .git/config

>>> [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}

>>> withdir subdir/a/b [CLI] bundle validate -o json
Warn: failed to read .git: unable to load repository specific gitconfig: open config: permission denied
Error: unable to load repository specific gitconfig: open config: permission denied


Exit code: 1
{
  "bundle_root_path": "."
}
@ -0,0 +1,26 @@
mkdir myrepo
cd myrepo
cp ../databricks.yml .
git-repo-init
mkdir -p subdir/a/b

printf "=== No permission to access .git. Badness: inferred flag is set to true even though we did not infer branch. bundle_root_path is not correct in subdir case.\n"
trace chmod 000 .git
errcode trace $CLI bundle validate
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git

printf "\n\n=== No permissions to read .git/HEAD. Badness: warning is not shown. inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
chmod 700 .git
trace chmod 000 .git/HEAD
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git

printf "\n\n=== No permissions to read .git/config. Badness: inferred is incorrectly set to true. bundle_root_path is not correct in subdir case.\n"
chmod 666 .git/HEAD
trace chmod 000 .git/config
errcode trace $CLI bundle validate -o json | jq .bundle.git
errcode trace withdir subdir/a/b $CLI bundle validate -o json | jq .bundle.git

cd ..
rm -fr myrepo
@ -0,0 +1,5 @@
Badness = "inferred flag is incorrectly set to true; bundle_root_path is not correct; Warn and Error describe the same issue; Warn goes to stderr, Error goes to stdout (for backward compat); warning about permissions is repeated twice"

[GOOS]
# This test relies on chmod which does not work on Windows
windows = false
@ -1,5 +1,5 @@

->>> $CLI bundle deploy --help
+>>> [CLI] bundle deploy --help
 Deploy bundle

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle deployment --help
+>>> [CLI] bundle deployment --help
 Deployment related commands

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle destroy --help
+>>> [CLI] bundle destroy --help
 Destroy deployed bundle resources

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle generate dashboard --help
+>>> [CLI] bundle generate dashboard --help
 Generate configuration for a dashboard

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle generate job --help
+>>> [CLI] bundle generate job --help
 Generate bundle configuration for a job

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle generate pipeline --help
+>>> [CLI] bundle generate pipeline --help
 Generate bundle configuration for a pipeline

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle generate --help
+>>> [CLI] bundle generate --help
 Generate bundle configuration

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle init --help
+>>> [CLI] bundle init --help
 Initialize using a bundle template.

 TEMPLATE_PATH optionally specifies which template to use. It can be one of the following:

@ -1,5 +1,5 @@

->>> $CLI bundle open --help
+>>> [CLI] bundle open --help
 Open a resource in the browser

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle run --help
+>>> [CLI] bundle run --help
 Run the job or pipeline identified by KEY.

 The KEY is the unique identifier of the resource to run. In addition to

@ -1,5 +1,5 @@

->>> $CLI bundle schema --help
+>>> [CLI] bundle schema --help
 Generate JSON Schema for bundle configuration

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle summary --help
+>>> [CLI] bundle summary --help
 Summarize resources deployed by this bundle

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle sync --help
+>>> [CLI] bundle sync --help
 Synchronize bundle tree to the workspace

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle validate --help
+>>> [CLI] bundle validate --help
 Validate configuration

 Usage:

@ -1,5 +1,5 @@

->>> $CLI bundle --help
+>>> [CLI] bundle --help
 Databricks Asset Bundles let you express data/AI/analytics projects as code.

 Online documentation: https://docs.databricks.com/en/dev-tools/bundles/index.html
@ -1,7 +1,7 @@
-Error: Files in the 'include' configuration section must be YAML files.
+Error: Files in the 'include' configuration section must be YAML or JSON files.
 in databricks.yml:5:4

-The file test.py in the 'include' configuration section is not a YAML file, and only YAML files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.
+The file test.py in the 'include' configuration section is not a YAML or JSON file, and only such files are supported. To include files to sync, specify them in the 'sync.include' configuration section instead.

 Name: non_yaml_in_includes

@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t default
+>>> [CLI] bundle validate -o json -t default
 {
 "autoscale": {
 "max_workers": 7,

@ -15,7 +15,7 @@
 "spark_version": "13.3.x-scala2.12"
 }

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
 {
 "autoscale": {
 "max_workers": 3,
@ -1,10 +1,10 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
 {
 "foo": {
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
 },
 "edit_mode": "UI_LOCKED",
 "format": "MULTI_TASK",

@ -27,12 +27,12 @@
 }
 }

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
 {
 "foo": {
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
 },
 "edit_mode": "UI_LOCKED",
 "format": "MULTI_TASK",
@ -1,10 +1,10 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
 {
 "foo": {
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development/state/metadata.json"
 },
 "edit_mode": "UI_LOCKED",
 "format": "MULTI_TASK",

@ -27,21 +27,21 @@
 }
 }

->>> $CLI bundle validate -t development
+>>> [CLI] bundle validate -t development
 Name: override_job_cluster
 Target: development
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/development
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/development

 Validation OK!

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
 {
 "foo": {
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging/state/metadata.json"
 },
 "edit_mode": "UI_LOCKED",
 "format": "MULTI_TASK",

@ -64,11 +64,11 @@ Validation OK!
 }
 }

->>> $CLI bundle validate -t staging
+>>> [CLI] bundle validate -t staging
 Name: override_job_cluster
 Target: staging
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_cluster/staging
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_cluster/staging

 Validation OK!

@ -1,5 +1,5 @@

->>> errcode $CLI bundle validate -o json -t development
+>>> errcode [CLI] bundle validate -o json -t development
 Error: file ./test1.py not found



@ -28,7 +28,7 @@
 ]
 }

->>> errcode $CLI bundle validate -o json -t staging
+>>> errcode [CLI] bundle validate -o json -t staging
 Error: file ./test1.py not found



@ -63,14 +63,14 @@ Exit code: 1
 ]
 }

->>> errcode $CLI bundle validate -t staging
+>>> errcode [CLI] bundle validate -t staging
 Error: file ./test1.py not found

 Name: override_job_tasks
 Target: staging
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/override_job_tasks/staging
+Path: /Workspace/Users/[USERNAME]/.bundle/override_job_tasks/staging

 Found 1 error

@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t dev
+>>> [CLI] bundle validate -o json -t dev
 Warning: expected map, found string
 at resources.clusters.my_cluster
 in databricks.yml:6:17

@ -13,7 +13,7 @@ Warning: expected map, found string
 }
 }

->>> $CLI bundle validate -t dev
+>>> [CLI] bundle validate -t dev
 Warning: expected map, found string
 at resources.clusters.my_cluster
 in databricks.yml:6:17

@ -21,7 +21,7 @@ Warning: expected map, found string
 Name: merge-string-map
 Target: dev
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/merge-string-map/dev
+Path: /Workspace/Users/[USERNAME]/.bundle/merge-string-map/dev

 Found 1 warning
@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
 {
 "foo": {
 "clusters": [

@ -14,14 +14,14 @@
 ],
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/development/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/development/state/metadata.json"
 },
 "name": "job",
 "permissions": []
 }
 }

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
 {
 "foo": {
 "clusters": [

@ -36,7 +36,7 @@
 ],
 "deployment": {
 "kind": "BUNDLE",
-"metadata_file_path": "/Workspace/Users/$USERNAME/.bundle/override_pipeline_cluster/staging/state/metadata.json"
+"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/override_pipeline_cluster/staging/state/metadata.json"
 },
 "name": "job",
 "permissions": []
@ -2,14 +2,14 @@
 {
 "job_cluster_key": "default",
 "notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook"
 },
 "task_key": "notebook_example"
 },
 {
 "job_cluster_key": "default",
 "spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file.py"
 },
 "task_key": "spark_python_example"
 },

@ -19,7 +19,7 @@
 "dbt run",
 "dbt run"
 ],
-"project_directory": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/dbt_project"
+"project_directory": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/dbt_project"
 },
 "job_cluster_key": "default",
 "task_key": "dbt_example"

@ -28,7 +28,7 @@
 "job_cluster_key": "default",
 "sql_task": {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/sql.sql"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/sql.sql"
 },
 "warehouse_id": "cafef00d"
 },
@ -1,22 +1,22 @@
 [
 {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file1.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file1.py"
 }
 },
 {
 "notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook1"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook1"
 }
 },
 {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/file2.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/file2.py"
 }
 },
 {
 "notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/fallback/development/files/src/notebook2"
+"path": "/Workspace/Users/[USERNAME]/.bundle/fallback/development/files/src/notebook2"
 }
 }
 ]
@ -1,15 +1,15 @@

->>> $CLI bundle validate -t development -o json
+>>> [CLI] bundle validate -t development -o json

->>> $CLI bundle validate -t error
+>>> [CLI] bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

 Name: fallback
 Target: error
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/fallback/error
+Path: /Workspace/Users/[USERNAME]/.bundle/fallback/error

 Found 1 error

@ -2,14 +2,14 @@
 {
 "job_cluster_key": "default",
 "notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
 },
 "task_key": "notebook_example"
 },
 {
 "job_cluster_key": "default",
 "spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
 },
 "task_key": "spark_python_example"
 },

@ -19,7 +19,7 @@
 "dbt run",
 "dbt run"
 ],
-"project_directory": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/dbt_project"
+"project_directory": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/dbt_project"
 },
 "job_cluster_key": "default",
 "task_key": "dbt_example"

@ -28,7 +28,7 @@
 "job_cluster_key": "default",
 "sql_task": {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/sql.sql"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/sql.sql"
 },
 "warehouse_id": "cafef00d"
 },

@ -68,7 +68,7 @@
 "for_each_task": {
 "task": {
 "notebook_task": {
-"notebook_path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook"
+"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook"
 }
 }
 },

@ -80,7 +80,7 @@
 "task": {
 "job_cluster_key": "default",
 "spark_python_task": {
-"python_file": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file.py"
+"python_file": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file.py"
 }
 }
 },
@ -1,22 +1,22 @@
 [
 {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file1.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file1.py"
 }
 },
 {
 "notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook1"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook1"
 }
 },
 {
 "file": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/file2.py"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/file2.py"
 }
 },
 {
 "notebook": {
-"path": "/Workspace/Users/$USERNAME/.bundle/nominal/development/files/src/notebook2"
+"path": "/Workspace/Users/[USERNAME]/.bundle/nominal/development/files/src/notebook2"
 }
 }
 ]
@ -1,15 +1,15 @@

->>> $CLI bundle validate -t development -o json
+>>> [CLI] bundle validate -t development -o json

->>> $CLI bundle validate -t error
+>>> [CLI] bundle validate -t error
 Error: notebook this value is overridden not found. Local notebook references are expected
 to contain one of the following file extensions: [.py, .r, .scala, .sql, .ipynb]

 Name: nominal
 Target: error
 Workspace:
-User: $USERNAME
+User: [USERNAME]
-Path: /Workspace/Users/$USERNAME/.bundle/nominal/error
+Path: /Workspace/Users/[USERNAME]/.bundle/nominal/error

 Found 1 error

@ -1,4 +1,4 @@

->>> $CLI bundle validate -t default -o json
+>>> [CLI] bundle validate -t default -o json

->>> $CLI bundle validate -t override -o json
+>>> [CLI] bundle validate -t override -o json
@ -1,5 +1,5 @@

->>> $CLI bundle validate -o json -t development
+>>> [CLI] bundle validate -o json -t development
 {
 "mode": "development",
 "quality_monitors": {

@ -21,7 +21,7 @@
 }
 }

->>> $CLI bundle validate -o json -t staging
+>>> [CLI] bundle validate -o json -t staging
 {
 "mode": null,
 "quality_monitors": {

@ -46,7 +46,7 @@
 }
 }

->>> $CLI bundle validate -o json -t production
+>>> [CLI] bundle validate -o json -t production
 {
 "mode": null,
 "quality_monitors": {
@@ -0,0 +1,11 @@
+bundle:
+  name: scripts
+
+experimental:
+  scripts:
+    preinit: "python3 ./myscript.py $EXITCODE preinit"
+    postinit: "python3 ./myscript.py 0 postinit"
+    prebuild: "python3 ./myscript.py 0 prebuild"
+    postbuild: "python3 ./myscript.py 0 postbuild"
+    predeploy: "python3 ./myscript.py 0 predeploy"
+    postdeploy: "python3 ./myscript.py 0 postdeploy"

@@ -0,0 +1,8 @@
+import sys
+
+info = " ".join(sys.argv[1:])
+sys.stderr.write(f"from myscript.py {info}: hello stderr!\n")
+sys.stdout.write(f"from myscript.py {info}: hello stdout!\n")
+
+exitcode = int(sys.argv[1])
+sys.exit(exitcode)

@@ -0,0 +1,50 @@
+
+>>> EXITCODE=0 errcode [CLI] bundle validate
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Name: scripts
+Target: default
+Workspace:
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/scripts/default
+
+Validation OK!
+
+>>> EXITCODE=1 errcode [CLI] bundle validate
+Executing 'preinit' script
+from myscript.py 1 preinit: hello stdout!
+from myscript.py 1 preinit: hello stderr!
+Error: failed to execute script: exit status 1
+
+Name: scripts
+
+Found 1 error
+
+Exit code: 1
+
+>>> EXITCODE=0 errcode [CLI] bundle deploy
+Executing 'preinit' script
+from myscript.py 0 preinit: hello stdout!
+from myscript.py 0 preinit: hello stderr!
+Executing 'postinit' script
+from myscript.py 0 postinit: hello stdout!
+from myscript.py 0 postinit: hello stderr!
+Executing 'prebuild' script
+from myscript.py 0 prebuild: hello stdout!
+from myscript.py 0 prebuild: hello stderr!
+Executing 'postbuild' script
+from myscript.py 0 postbuild: hello stdout!
+from myscript.py 0 postbuild: hello stderr!
+Executing 'predeploy' script
+from myscript.py 0 predeploy: hello stdout!
+from myscript.py 0 predeploy: hello stderr!
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/scripts/default/files...
+Deploying resources...
+Deployment complete!
+Executing 'postdeploy' script
+from myscript.py 0 postdeploy: hello stdout!
+from myscript.py 0 postdeploy: hello stderr!

@@ -0,0 +1,3 @@
+trace EXITCODE=0 errcode $CLI bundle validate
+trace EXITCODE=1 errcode $CLI bundle validate
+trace EXITCODE=0 errcode $CLI bundle deploy

@@ -1,10 +1,10 @@
-Error: path "$TMPDIR" is not within repository root "$TMPDIR/myrepo"
+Error: path "[TMPDIR]" is not within repository root "[TMPDIR]/myrepo"
 
 Name: test-bundle
 Target: default
 Workspace:
-  User: $USERNAME
-  Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
 
 Found 1 error

@@ -3,4 +3,6 @@ mkdir myrepo
 cd myrepo
 cp ../databricks.yml .
 git-repo-init
-$CLI bundle validate | sed 's/\\\\/\//g'
+errcode $CLI bundle validate
+cd ..
+rm -fr myrepo

@@ -0,0 +1,3 @@
+[[Repls]]
+Old = '\\\\myrepo'
+New = '/myrepo'

@@ -1,7 +1,7 @@
 Name: test-bundle
 Target: default
 Workspace:
-  User: $USERNAME
-  Path: /Workspace/Users/$USERNAME/.bundle/test-bundle/default
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/test-bundle/default
 
 Validation OK!

@@ -0,0 +1 @@
+{}

@@ -0,0 +1,3 @@
+Error: failed to compute file content for helpers.txt.tmpl. template: :1:14: executing "" at <user_name>: error calling user_name:
+
+Exit code: 1

@@ -0,0 +1 @@
+$CLI bundle init .

@@ -0,0 +1 @@
+user_name: {{ user_name }}

@@ -0,0 +1,7 @@
+Badness = '''(minor) error message is not great: executing "" at <user_name>: error calling user_name:'''
+LocalOnly = true
+
+[[Server]]
+Pattern = "GET /api/2.0/preview/scim/v2/Me"
+Response.Body = '{}'
+Response.StatusCode = 500

@@ -0,0 +1 @@
+{}

@@ -0,0 +1,2 @@
+✨ Successfully initialized template
+user_name: [USERNAME]

@@ -0,0 +1,3 @@
+$CLI bundle init .
+cat helpers.txt
+rm helpers.txt

Some files were not shown because too many files have changed in this diff.