mirror of https://github.com/databricks/cli.git
Merge branch 'main' into populate-sources
This commit is contained in: commit 6bec3deac2

@@ -1 +1 @@
-0be1b914249781b5e903b7676fd02255755bc851
+e5c870006a536121442cfd2441bdc8a5fb76ae1e

@@ -179,7 +179,7 @@ func new{{.PascalName}}() *cobra.Command {
 {{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}
 {{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
 {{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
-{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
+{{- $onlyPathArgsRequiredAsPositionalArguments := and .Request (eq (len .RequiredPositionalArguments) (len .Request.RequiredPathFields)) -}}
 {{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
 {{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}

@@ -218,12 +218,12 @@ func new{{.PascalName}}() *cobra.Command {
     cmd.Args = func(cmd *cobra.Command, args []string) error {
         {{- if $hasDifferentArgsWithJsonFlag }}
         if cmd.Flags().Changed("json") {
-            err := root.ExactArgs({{len $request.RequiredPathFields}})(cmd, args)
+            err := root.ExactArgs({{len .Request.RequiredPathFields}})(cmd, args)
             if err != nil {
-                {{- if eq 0 (len $request.RequiredPathFields) }}
+                {{- if eq 0 (len .Request.RequiredPathFields) }}
                 return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide{{- range $index, $field := $request.RequiredFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
                 {{- else }}
-                return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := $request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
+                return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := .Request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
                 {{- end }}
             }
             return nil

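For orientation, the argument validator that this template renders looks roughly like the following hand-written Go sketch, assuming a hypothetical command with exactly one required path field. The real generated code uses the repo's internal root.ExactArgs helper; exactArgs below is a local stand-in, and the command name and messages are illustrative only.

package main

import (
    "fmt"

    "github.com/spf13/cobra"
)

// exactArgs approximates the repo's root.ExactArgs helper: a cobra
// validator that requires exactly n positional arguments.
func exactArgs(n int) cobra.PositionalArgs {
    return func(cmd *cobra.Command, args []string) error {
        if len(args) != n {
            return fmt.Errorf("accepts %d arg(s), received %d", n, len(args))
        }
        return nil
    }
}

func newExampleCmd() *cobra.Command {
    cmd := &cobra.Command{Use: "example RUN_ID"}
    cmd.Flags().Bool("json", false, "read the request body from JSON input")

    // Rendered shape of the templated cmd.Args above: when --json is set,
    // only the required path fields may be passed positionally; body
    // fields must come from the JSON input instead.
    cmd.Args = func(cmd *cobra.Command, args []string) error {
        if cmd.Flags().Changed("json") {
            if err := exactArgs(1)(cmd, args); err != nil {
                return fmt.Errorf("when --json flag is specified, provide only RUN_ID as a positional argument")
            }
            return nil
        }
        return exactArgs(1)(cmd, args)
    }

    cmd.RunE = func(cmd *cobra.Command, args []string) error {
        fmt.Println("run id:", args[0])
        return nil
    }
    return cmd
}

func main() {
    cmd := newExampleCmd()
    cmd.SetArgs([]string{"--json", "12345"})
    if err := cmd.Execute(); err != nil {
        fmt.Println("error:", err)
    }
}
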
@@ -1,11 +1,13 @@
 cmd/account/access-control/access-control.go linguist-generated=true
 cmd/account/billable-usage/billable-usage.go linguist-generated=true
+cmd/account/budget-policy/budget-policy.go linguist-generated=true
 cmd/account/budgets/budgets.go linguist-generated=true
 cmd/account/cmd.go linguist-generated=true
 cmd/account/credentials/credentials.go linguist-generated=true
 cmd/account/csp-enablement-account/csp-enablement-account.go linguist-generated=true
 cmd/account/custom-app-integration/custom-app-integration.go linguist-generated=true
 cmd/account/disable-legacy-features/disable-legacy-features.go linguist-generated=true
+cmd/account/enable-ip-access-lists/enable-ip-access-lists.go linguist-generated=true
 cmd/account/encryption-keys/encryption-keys.go linguist-generated=true
 cmd/account/esm-enablement-account/esm-enablement-account.go linguist-generated=true
 cmd/account/federation-policy/federation-policy.go linguist-generated=true

@@ -75,6 +77,7 @@ cmd/workspace/instance-pools/instance-pools.go linguist-generated=true
 cmd/workspace/instance-profiles/instance-profiles.go linguist-generated=true
 cmd/workspace/ip-access-lists/ip-access-lists.go linguist-generated=true
 cmd/workspace/jobs/jobs.go linguist-generated=true
+cmd/workspace/lakeview-embedded/lakeview-embedded.go linguist-generated=true
 cmd/workspace/lakeview/lakeview.go linguist-generated=true
 cmd/workspace/libraries/libraries.go linguist-generated=true
 cmd/workspace/metastores/metastores.go linguist-generated=true

@@ -99,11 +102,13 @@ cmd/workspace/providers/providers.go linguist-generated=true
 cmd/workspace/quality-monitors/quality-monitors.go linguist-generated=true
 cmd/workspace/queries-legacy/queries-legacy.go linguist-generated=true
 cmd/workspace/queries/queries.go linguist-generated=true
+cmd/workspace/query-execution/query-execution.go linguist-generated=true
 cmd/workspace/query-history/query-history.go linguist-generated=true
 cmd/workspace/query-visualizations-legacy/query-visualizations-legacy.go linguist-generated=true
 cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true
 cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true
 cmd/workspace/recipients/recipients.go linguist-generated=true
+cmd/workspace/redash-config/redash-config.go linguist-generated=true
 cmd/workspace/registered-models/registered-models.go linguist-generated=true
 cmd/workspace/repos/repos.go linguist-generated=true
 cmd/workspace/resource-quotas/resource-quotas.go linguist-generated=true

@@ -1 +1,2 @@
 * @pietern @andrewnester @shreyas-goenka @denik
+cmd/labs @alexott @nfx

@@ -1,6 +1,10 @@
 ## Changes
-<!-- Summary of your changes that are easy to understand -->
+<!-- Brief summary of your changes that is easy to understand -->
+
+## Why
+<!-- Why are these changes needed? Provide the context that the reviewer might be missing.
+For example, were there any decisions behind the change that are not reflected in the code itself? -->
 
 ## Tests
-<!-- How is this tested? -->
+<!-- How have you tested the changes? -->
 

@@ -18,7 +18,7 @@ jobs:
       pull-requests: write
 
     steps:
-      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
+      - uses: actions/stale@5bef64f19d7facfb25b37b414482c7164d639639 # v9.1.0
        with:
          stale-issue-message: This issue has not received a response in a while. If you want to keep this issue open, please leave a comment below and auto-close will be canceled.
          stale-pr-message: This PR has not received an update in a while. If you want to keep this PR open, please leave a comment below or push a new commit and auto-close will be canceled.

@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -23,7 +23,7 @@ jobs:
     steps:
       - name: Generate GitHub App Token
         id: generate-token
-        uses: actions/create-github-app-token@c1a285145b9d317df6ced56c09f525b5c2b6f755 # v1.11.1
+        uses: actions/create-github-app-token@136412a57a7081aa63c935a2cc2918f76c34f514 # v1.11.2
         with:
           app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
           private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}

@@ -50,10 +50,10 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
 
       - name: Setup Python
-        uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
+        uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
         with:
           python-version: '3.9'

@@ -82,7 +82,7 @@ jobs:
       - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
       - uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum

@@ -95,12 +95,12 @@ jobs:
           # Exit with status code 1 if there are differences (i.e. unformatted files)
           git diff --exit-code
       - name: golangci-lint
-        uses: golangci/golangci-lint-action@971e284b6050e8a5849b72094c50ab08da042db8 # v6.1.1
+        uses: golangci/golangci-lint-action@2226d7cb06a077cd73e56eedd38eecad18e5d837 # v6.5.0
         with:
           version: v1.63.4
           args: --timeout=15m
       - name: Run ruff
-        uses: astral-sh/ruff-action@31a518504640beb4897d0b9f9e50a2a9196e75ba # v3.0.1
+        uses: astral-sh/ruff-action@f14634c415d3e63ffd4d550a22f037df4c734a60 # v3.1.0
         with:
           version: "0.9.1"
           args: "format --check"

@@ -116,7 +116,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
           # Use different schema from regular job, to avoid overwriting the same key
           cache-dependency-path: |
             go.sum

@@ -145,7 +145,10 @@ jobs:
           go run main.go bundle schema > schema.json
 
           # Add markdownDescription keyword to ajv
-          echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js
+          echo "module.exports = function(a) {
+            a.addKeyword('markdownDescription');
+            a.addKeyword('deprecationMessage');
+          }" >> keywords.js
 
           for file in ./bundle/internal/schema/testdata/pass/*.yml; do
             ajv test -s schema.json -d $file --valid -c=./keywords.js

@@ -34,7 +34,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
 
       # The default cache key for this action considers only the `go.sum` file.
       # We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -54,21 +54,21 @@ jobs:
           args: release --snapshot --skip docker
 
       - name: Upload macOS binaries
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
         with:
           name: cli_darwin_snapshot
           path: |
             dist/*_darwin_*/
 
       - name: Upload Linux binaries
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
         with:
           name: cli_linux_snapshot
           path: |
             dist/*_linux_*/
 
       - name: Upload Windows binaries
-        uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
+        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
         with:
           name: cli_windows_snapshot
           path: |

@@ -26,7 +26,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@3041bf56c941b39c61721a86cd11f3bb1338122a # v5.2.0
         with:
-          go-version: 1.23.4
+          go-version-file: go.mod
 
       # The default cache key for this action considers only the `go.sum` file.
       # We include .goreleaser.yaml here to differentiate from the cache used by the push action

@@ -46,7 +46,7 @@ jobs:
       # QEMU is required to build cross platform docker images using buildx.
       # It allows virtualization of the CPU architecture at the application level.
       - name: Set up QEMU dependency
-        uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
+        uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
 
       - name: Run GoReleaser
         id: releaser

@@ -25,11 +25,8 @@ coverage-acceptance.txt
 __pycache__
 *.pyc
 
-.terraform
-.terraform.lock.hcl
-
 .idea
 .vscode/launch.json
 .vscode/tasks.json
-
 .databricks
+.ruff_cache

@@ -17,5 +17,8 @@
     "python.envFile": "${workspaceRoot}/.env",
     "python.analysis.stubPath": ".vscode",
     "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\<codecell\\>|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])",
-    "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------"
+    "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------",
+    "files.associations": {
+        "script": "shellscript"
+    }
 }

CHANGELOG.md (109 changes)

@@ -1,5 +1,114 @@
 # Version changelog
 
+## [Release] Release v0.243.0
+
+CLI:
+* Upgrade Go SDK to 0.59.0 ([#2425](https://github.com/databricks/cli/pull/2425)).
+
+Bundles:
+* Added a warning when `config` section is used in apps ([#2416](https://github.com/databricks/cli/pull/2416)).
+* Switch to use GET workspaces-files/{name} instead of workspace/export for state files to avoid 10MB limit ([#2423](https://github.com/databricks/cli/pull/2423)).
+* Use schema field for pipeline in builtin template ([#2347](https://github.com/databricks/cli/pull/2347)).
+* Add warning when variable interpolation is used for auth fields ([#2399](https://github.com/databricks/cli/pull/2399)).
+* Add warning when include is used in config files other than databricks.yml ([#2389](https://github.com/databricks/cli/pull/2389)).
+* Add support for schemas in deployment bind/unbind commands ([#2406](https://github.com/databricks/cli/pull/2406)).
+* Do not modify/create .gitignore in bundle root ([#2429](https://github.com/databricks/cli/pull/2429)).
+* Raise an error when multiple local libraries with the same basename are used ([#2382](https://github.com/databricks/cli/pull/2382)).
+* Upgrade TF provider to 1.68.0 ([#2426](https://github.com/databricks/cli/pull/2426)).
+
+API Changes:
+* Changed `databricks experiments log-inputs` command with new required argument order.
+* Added `databricks genie get-space` command.
+* Added `databricks providers list-provider-share-assets` command.
+* Changed `databricks shares update-permissions` command return type to become non-empty.
+
+OpenAPI commit e5c870006a536121442cfd2441bdc8a5fb76ae1e (2025-03-03)
+
+## [Release] Release v0.242.0
+
+Notable changes:
+Starting with this version, the CLI does not load bundle auth information when a CLI command is executed inside the bundle directory with a profile explicitly provided via the `-p` flag.
+For more details see the related GitHub issue https://github.com/databricks/cli/issues/1358
+
+CLI:
+* Do not load host from bundle for CLI commands when profile flag is used ([#2335](https://github.com/databricks/cli/pull/2335)).
+* Fixed accessing required path parameters in CLI generation when the --json flag is used ([#2373](https://github.com/databricks/cli/pull/2373)).
+
+Bundles:
+* Provide instructions for testing in the default-python template ([#2355](https://github.com/databricks/cli/pull/2355)).
+* Remove `run_as` from the built-in templates ([#2044](https://github.com/databricks/cli/pull/2044)).
+* Change warning about incomplete permissions section into a recommendation ([#2043](https://github.com/databricks/cli/pull/2043)).
+* Refine `mode: production` diagnostic output ([#2236](https://github.com/databricks/cli/pull/2236)).
+* Support serverless mode in default-python template (explicit prompt) ([#2377](https://github.com/databricks/cli/pull/2377)).
+* Set default data_security_mode to "SINGLE_USER" in bundle templates ([#2372](https://github.com/databricks/cli/pull/2372)).
+* Fixed spark version check for clusters defined in the same bundle ([#2374](https://github.com/databricks/cli/pull/2374)).
+
+API Changes:
+* Added `databricks genie get-message-query-result-by-attachment` command.
+
+OpenAPI commit 99f644e72261ef5ecf8d74db20f4b7a1e09723cc (2025-02-11)
+
+## [Release] Release v0.241.2
+
+This is a bugfix release to address an issue where jobs with tasks with a
+libraries section with PyPI packages could not be deployed.
+
+Bundles:
+* Revert changes related to basename check for local libraries ([#2345](https://github.com/databricks/cli/pull/2345)).
+
+## [Release] Release v0.241.1
+
+Bundles:
+* Fix for regression deploying resources with PyPi and Maven library types ([#2341](https://github.com/databricks/cli/pull/2341)).
+
+## [Release] Release v0.241.0
+
+Bundles:
+* Added support to generate Git based jobs ([#2304](https://github.com/databricks/cli/pull/2304)).
+* Added support for run_as in pipelines ([#2287](https://github.com/databricks/cli/pull/2287)).
+* Raise an error when multiple local libraries with the same basename are used ([#2297](https://github.com/databricks/cli/pull/2297)).
+* Fix env variable for AzureCli local config ([#2248](https://github.com/databricks/cli/pull/2248)).
+* Accept JSON files in includes section ([#2265](https://github.com/databricks/cli/pull/2265)).
+* Always print warnings and errors; clean up format ([#2213](https://github.com/databricks/cli/pull/2213))
+
+API Changes:
+* Added `databricks account budget-policy` command group.
+* Added `databricks lakeview-embedded` command group.
+* Added `databricks query-execution` command group.
+* Added `databricks account enable-ip-access-lists` command group.
+* Added `databricks redash-config` command group.
+
+OpenAPI commit c72c58f97b950fcb924a90ef164bcb10cfcd5ece (2025-02-03)
+Dependency updates:
+* Upgrade to TF provider 1.65.1 ([#2328](https://github.com/databricks/cli/pull/2328)).
+* Bump github.com/hashicorp/terraform-exec from 0.21.0 to 0.22.0 ([#2237](https://github.com/databricks/cli/pull/2237)).
+* Bump github.com/spf13/pflag from 1.0.5 to 1.0.6 ([#2281](https://github.com/databricks/cli/pull/2281)).
+* Bump github.com/databricks/databricks-sdk-go from 0.56.1 to 0.57.0 ([#2321](https://github.com/databricks/cli/pull/2321)).
+* Bump golang.org/x/oauth2 from 0.25.0 to 0.26.0 ([#2322](https://github.com/databricks/cli/pull/2322)).
+* Bump golang.org/x/term from 0.28.0 to 0.29.0 ([#2325](https://github.com/databricks/cli/pull/2325)).
+* Bump golang.org/x/text from 0.21.0 to 0.22.0 ([#2323](https://github.com/databricks/cli/pull/2323)).
+* Bump golang.org/x/mod from 0.22.0 to 0.23.0 ([#2324](https://github.com/databricks/cli/pull/2324)).
+
+## [Release] Release v0.240.0
+
+Bundles:
+* Added support for double underscore variable references ([#2203](https://github.com/databricks/cli/pull/2203)).
+* Do not wait for app compute to start on `bundle deploy` ([#2144](https://github.com/databricks/cli/pull/2144)).
+* Remove bundle.git.inferred ([#2258](https://github.com/databricks/cli/pull/2258)).
+* libs/python: Remove DetectInterpreters ([#2234](https://github.com/databricks/cli/pull/2234)).
+
+API Changes:
+* Added `databricks access-control` command group.
+* Added `databricks serving-endpoints http-request` command.
+* Changed `databricks serving-endpoints create` command with new required argument order.
+* Changed `databricks serving-endpoints get-open-api` command return type to become non-empty.
+* Changed `databricks recipients update` command return type to become non-empty.
+
+OpenAPI commit 0be1b914249781b5e903b7676fd02255755bc851 (2025-01-22)
+Dependency updates:
+* Bump github.com/databricks/databricks-sdk-go from 0.55.0 to 0.56.1 ([#2238](https://github.com/databricks/cli/pull/2238)).
+* Upgrade TF provider to 1.64.1 ([#2247](https://github.com/databricks/cli/pull/2247)).
+
 ## [Release] Release v0.239.1
 
 CLI:

Makefile (23 changes)

@@ -1,4 +1,4 @@
-default: vendor fmt lint
+default: vendor fmt lint tidy
 
 PACKAGES=./acceptance/... ./libs/... ./internal/... ./cmd/... ./bundle/... .
 

@@ -9,6 +9,10 @@ GOTESTSUM_CMD ?= gotestsum --format ${GOTESTSUM_FORMAT} --no-summary=skipped
 lint:
 	golangci-lint run --fix
 
+tidy:
+	@# not part of golangci-lint, apparently
+	go mod tidy
+
 lintcheck:
 	golangci-lint run ./...
 

@@ -16,7 +20,7 @@ lintcheck:
 # formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
 # If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
 fmt:
-	ruff format -q
+	ruff format -qn
 	golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...
 
 test:

@@ -24,7 +28,7 @@ test:
 
 cover:
 	rm -fr ./acceptance/build/cover/
-	CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
+	VERBOSE_TEST=1 CLI_GOCOVERDIR=build/cover ${GOTESTSUM_CMD} -- -coverprofile=coverage.txt ${PACKAGES}
 	rm -fr ./acceptance/build/cover-merged/
 	mkdir -p acceptance/build/cover-merged/
 	go tool covdata merge -i $$(printf '%s,' acceptance/build/cover/* | sed 's/,$$//') -o acceptance/build/cover-merged/

@@ -48,12 +52,15 @@ vendor:
 schema:
 	go run ./bundle/internal/schema ./bundle/internal/schema ./bundle/schema/jsonschema.json
 
-INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -parallel 4 -timeout=2h
+docs:
+	go run ./bundle/docsgen ./bundle/internal/schema ./bundle/docsgen
+
+INTEGRATION = gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./acceptance ./integration/..." -- -parallel 4 -timeout=2h
 
-integration:
+integration: vendor
 	$(INTEGRATION)
 
-integration-short:
-	$(INTEGRATION) -short
+integration-short: vendor
+	VERBOSE_TEST=1 $(INTEGRATION) -short
 
-.PHONY: lint lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover
+.PHONY: lint tidy lintcheck fmt test cover showcover build snapshot vendor schema integration integration-short acc-cover acc-showcover docs

NOTICE (9 changes)

@@ -109,3 +109,12 @@ License - https://github.com/hexops/gotextdiff/blob/main/LICENSE
 https://github.com/BurntSushi/toml
 Copyright (c) 2013 TOML authors
 https://github.com/BurntSushi/toml/blob/master/COPYING
+
+dario.cat/mergo
+Copyright (c) 2013 Dario Castañé. All rights reserved.
+Copyright (c) 2012 The Go Authors. All rights reserved.
+https://github.com/darccio/mergo/blob/master/LICENSE
+
+https://github.com/gorilla/mux
+Copyright (c) 2023 The Gorilla Authors. All rights reserved.
+https://github.com/gorilla/mux/blob/main/LICENSE

@@ -0,0 +1 @@
+build

@@ -2,28 +2,41 @@ package acceptance_test
 
 import (
     "context"
+    "encoding/json"
     "errors"
     "flag"
     "fmt"
     "io"
+    "net/http"
     "os"
     "os/exec"
     "path/filepath"
     "regexp"
     "runtime"
+    "slices"
     "sort"
     "strings"
     "testing"
     "time"
+    "unicode/utf8"
+
+    "github.com/google/uuid"
 
     "github.com/databricks/cli/internal/testutil"
     "github.com/databricks/cli/libs/env"
     "github.com/databricks/cli/libs/testdiff"
+    "github.com/databricks/cli/libs/testserver"
     "github.com/databricks/databricks-sdk-go"
+    "github.com/databricks/databricks-sdk-go/service/iam"
+    "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
 
-var KeepTmp bool
+var (
+    KeepTmp     bool
+    NoRepl      bool
+    VerboseTest bool = os.Getenv("VERBOSE_TEST") != ""
+)
 
 // In order to debug CLI running under acceptance test, set this to full subtest name, e.g. "bundle/variables/empty"
 // Then install your breakpoints and click "debug test" near TestAccept in VSCODE.

@@ -38,12 +51,16 @@ var InprocessMode bool
 func init() {
     flag.BoolVar(&InprocessMode, "inprocess", SingleTest != "", "Run CLI in the same process as test (for debugging)")
     flag.BoolVar(&KeepTmp, "keeptmp", false, "Do not delete TMP directory after run")
+    flag.BoolVar(&NoRepl, "norepl", false, "Do not apply any replacements (for debugging)")
 }
 
 const (
     EntryPointScript = "script"
     CleanupScript    = "script.cleanup"
     PrepareScript    = "script.prepare"
+    MaxFileSize      = 100_000
+    // Filename to save replacements to (used by diff.py)
+    ReplsFile = "repls.json"
 )
 
 var Scripts = map[string]bool{

@@ -52,6 +69,10 @@ var Scripts = map[string]bool{
     PrepareScript: true,
 }
 
+var Ignored = map[string]bool{
+    ReplsFile: true,
+}
+
 func TestAccept(t *testing.T) {
     testAccept(t, InprocessMode, SingleTest)
 }

@@ -60,7 +81,8 @@ func TestInprocessMode(t *testing.T) {
     if InprocessMode {
         t.Skip("Already tested by TestAccept")
     }
-    require.Equal(t, 1, testAccept(t, true, "selftest"))
+    require.Equal(t, 1, testAccept(t, true, "selftest/basic"))
+    require.Equal(t, 1, testAccept(t, true, "selftest/server"))
 }
 
 func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {

@@ -68,6 +90,11 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
     cwd, err := os.Getwd()
     require.NoError(t, err)
 
+    buildDir := filepath.Join(cwd, "build", fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH))
+
+    // Download terraform and provider and create config; this also creates build directory.
+    RunCommand(t, []string{"python3", filepath.Join(cwd, "install_terraform.py"), "--targetdir", buildDir}, ".")
+
     coverDir := os.Getenv("CLI_GOCOVERDIR")
 
     if coverDir != "" {

@@ -84,46 +111,58 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
         t.Setenv("CMD_SERVER_URL", cmdServer.URL)
         execPath = filepath.Join(cwd, "bin", "callserver.py")
     } else {
-        execPath = BuildCLI(t, cwd, coverDir)
+        execPath = BuildCLI(t, buildDir, coverDir)
     }
 
     t.Setenv("CLI", execPath)
-    repls.SetPath(execPath, "$CLI")
+    repls.SetPath(execPath, "[CLI]")
 
     // Make helper scripts available
     t.Setenv("PATH", fmt.Sprintf("%s%c%s", filepath.Join(cwd, "bin"), os.PathListSeparator, os.Getenv("PATH")))
 
     tempHomeDir := t.TempDir()
-    repls.SetPath(tempHomeDir, "$TMPHOME")
+    repls.SetPath(tempHomeDir, "[TMPHOME]")
     t.Logf("$TMPHOME=%v", tempHomeDir)
 
-    // Prevent CLI from downloading terraform in each test:
-    t.Setenv("DATABRICKS_TF_EXEC_PATH", tempHomeDir)
+    // Make use of uv cache; since we set HomeEnvVar to temporary directory, it is not picked up automatically
+    uvCache := getUVDefaultCacheDir(t)
+    t.Setenv("UV_CACHE_DIR", uvCache)
 
     ctx := context.Background()
     cloudEnv := os.Getenv("CLOUD_ENV")
 
     if cloudEnv == "" {
-        server := StartServer(t)
-        AddHandlers(server)
-        // Redirect API access to local server:
-        t.Setenv("DATABRICKS_HOST", server.URL)
-        t.Setenv("DATABRICKS_TOKEN", "dapi1234")
+        defaultServer := testserver.New(t)
+        AddHandlers(defaultServer)
+        t.Setenv("DATABRICKS_DEFAULT_HOST", defaultServer.URL)
 
         homeDir := t.TempDir()
         // Do not read user's ~/.databrickscfg
         t.Setenv(env.HomeEnvVar(), homeDir)
     }
 
-    workspaceClient, err := databricks.NewWorkspaceClient()
-    require.NoError(t, err)
+    terraformrcPath := filepath.Join(buildDir, ".terraformrc")
+    t.Setenv("TF_CLI_CONFIG_FILE", terraformrcPath)
+    t.Setenv("DATABRICKS_TF_CLI_CONFIG_FILE", terraformrcPath)
+    repls.SetPath(terraformrcPath, "[DATABRICKS_TF_CLI_CONFIG_FILE]")
 
-    user, err := workspaceClient.CurrentUser.Me(ctx)
-    require.NoError(t, err)
-    require.NotNil(t, user)
-    testdiff.PrepareReplacementsUser(t, &repls, *user)
-    testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
-    testdiff.PrepareReplacementsUUID(t, &repls)
+    terraformExecPath := filepath.Join(buildDir, "terraform")
+    if runtime.GOOS == "windows" {
+        terraformExecPath += ".exe"
+    }
+    t.Setenv("DATABRICKS_TF_EXEC_PATH", terraformExecPath)
+    t.Setenv("TERRAFORM", terraformExecPath)
+    repls.SetPath(terraformExecPath, "[TERRAFORM]")
+
+    // do it last so that full paths match first:
+    repls.SetPath(buildDir, "[BUILD_DIR]")
 
     testdiff.PrepareReplacementsDevVersion(t, &repls)
     testdiff.PrepareReplacementSdkVersion(t, &repls)
     testdiff.PrepareReplacementsGoVersion(t, &repls)
 
+    repls.SetPath(cwd, "[TESTROOT]")
+
     repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})
 
     testDirs := getTests(t)
     require.NotEmpty(t, testDirs)

@@ -136,8 +175,7 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
     }
 
     for _, dir := range testDirs {
-        testName := strings.ReplaceAll(dir, "\\", "/")
-        t.Run(testName, func(t *testing.T) {
+        t.Run(dir, func(t *testing.T) {
             if !InprocessMode {
                 t.Parallel()
             }

@@ -159,7 +197,8 @@ func getTests(t *testing.T) []string {
         name := filepath.Base(path)
         if name == EntryPointScript {
             // Presence of 'script' marks a test case in this directory
-            testDirs = append(testDirs, filepath.Dir(path))
+            testName := filepath.ToSlash(filepath.Dir(path))
+            testDirs = append(testDirs, testName)
         }
         return nil
     })

@@ -177,6 +216,15 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
         t.Skipf("Disabled via GOOS.%s setting in %s", runtime.GOOS, configPath)
     }
 
+    cloudEnv := os.Getenv("CLOUD_ENV")
+    if !isTruePtr(config.Local) && cloudEnv == "" {
+        t.Skipf("Disabled via Local setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
+    }
+
+    if !isTruePtr(config.Cloud) && cloudEnv != "" {
+        t.Skipf("Disabled via Cloud setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
+    }
+
     var tmpDir string
     var err error
     if KeepTmp {

@@ -189,8 +237,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
         tmpDir = t.TempDir()
     }
 
-    repls.SetPathWithParents(tmpDir, "$TMPDIR")
-    repls.Repls = append(repls.Repls, config.Repls...)
+    repls.SetPathWithParents(tmpDir, "[TMPDIR]")
 
     scriptContents := readMergedScriptContents(t, dir)
     testutil.WriteFile(t, filepath.Join(tmpDir, EntryPointScript), scriptContents)

|
|||
|
||||
args := []string{"bash", "-euo", "pipefail", EntryPointScript}
|
||||
cmd := exec.Command(args[0], args[1:]...)
|
||||
cmd.Env = os.Environ()
|
||||
|
||||
var workspaceClient *databricks.WorkspaceClient
|
||||
var user iam.User
|
||||
|
||||
// Start a new server with a custom configuration if the acceptance test
|
||||
// specifies a custom server stubs.
|
||||
var server *testserver.Server
|
||||
|
||||
if cloudEnv == "" {
|
||||
// Start a new server for this test if either:
|
||||
// 1. A custom server spec is defined in the test configuration.
|
||||
// 2. The test is configured to record requests and assert on them. We need
|
||||
// a duplicate of the default server to record requests because the default
|
||||
// server otherwise is a shared resource.
|
||||
|
||||
databricksLocalHost := os.Getenv("DATABRICKS_DEFAULT_HOST")
|
||||
|
||||
if len(config.Server) > 0 || isTruePtr(config.RecordRequests) {
|
||||
server = testserver.New(t)
|
||||
if isTruePtr(config.RecordRequests) {
|
||||
requestsPath := filepath.Join(tmpDir, "out.requests.txt")
|
||||
server.RecordRequestsCallback = func(request *testserver.Request) {
|
||||
req := getLoggedRequest(request, config.IncludeRequestHeaders)
|
||||
reqJson, err := json.MarshalIndent(req, "", " ")
|
||||
assert.NoErrorf(t, err, "Failed to indent: %#v", req)
|
||||
|
||||
reqJsonWithRepls := repls.Replace(string(reqJson))
|
||||
|
||||
f, err := os.OpenFile(requestsPath, os.O_CREATE|os.O_APPEND|os.O_WRONLY, 0o644)
|
||||
assert.NoError(t, err)
|
||||
defer f.Close()
|
||||
|
||||
_, err = f.WriteString(reqJsonWithRepls + "\n")
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
}
|
||||
|
||||
// We want later stubs takes precedence, because then leaf configs take precedence over parent directory configs
|
||||
// In gorilla/mux earlier handlers take precedence, so we need to reverse the order
|
||||
slices.Reverse(config.Server)
|
||||
|
||||
for _, stub := range config.Server {
|
||||
require.NotEmpty(t, stub.Pattern)
|
||||
items := strings.Split(stub.Pattern, " ")
|
||||
require.Len(t, items, 2)
|
||||
server.Handle(items[0], items[1], func(req testserver.Request) any {
|
||||
return stub.Response
|
||||
})
|
||||
}
|
||||
|
||||
// The earliest handlers take precedence, add default handlers last
|
||||
AddHandlers(server)
|
||||
databricksLocalHost = server.URL
|
||||
}
|
||||
|
||||
// Each local test should use a new token that will result into a new fake workspace,
|
||||
// so that test don't interfere with each other.
|
||||
tokenSuffix := strings.ReplaceAll(uuid.NewString(), "-", "")
|
||||
config := databricks.Config{
|
||||
Host: databricksLocalHost,
|
||||
Token: "dbapi" + tokenSuffix,
|
||||
}
|
||||
workspaceClient, err = databricks.NewWorkspaceClient(&config)
|
||||
require.NoError(t, err)
|
||||
|
||||
cmd.Env = append(cmd.Env, "DATABRICKS_HOST="+config.Host)
|
||||
cmd.Env = append(cmd.Env, "DATABRICKS_TOKEN="+config.Token)
|
||||
|
||||
// For the purposes of replacements, use testUser.
|
||||
// Note, users might have overriden /api/2.0/preview/scim/v2/Me but that should not affect the replacement:
|
||||
user = testUser
|
||||
} else {
|
||||
// Use whatever authentication mechanism is configured by the test runner.
|
||||
workspaceClient, err = databricks.NewWorkspaceClient(&databricks.Config{})
|
||||
require.NoError(t, err)
|
||||
pUser, err := workspaceClient.CurrentUser.Me(context.Background())
|
||||
require.NoError(t, err, "Failed to get current user")
|
||||
user = *pUser
|
||||
}
|
||||
|
||||
testdiff.PrepareReplacementsUser(t, &repls, user)
|
||||
testdiff.PrepareReplacementsWorkspaceClient(t, &repls, workspaceClient)
|
||||
|
||||
// Must be added PrepareReplacementsUser, otherwise conflicts with [USERNAME]
|
||||
testdiff.PrepareReplacementsUUID(t, &repls)
|
||||
|
||||
// User replacements come last:
|
||||
repls.Repls = append(repls.Repls, config.Repls...)
|
||||
|
||||
// Save replacements to temp test directory so that it can be read by diff.py
|
||||
replsJson, err := json.MarshalIndent(repls.Repls, "", " ")
|
||||
require.NoError(t, err)
|
||||
testutil.WriteFile(t, filepath.Join(tmpDir, ReplsFile), string(replsJson))
|
||||
|
||||
if coverDir != "" {
|
||||
// Creating individual coverage directory for each test, because writing to the same one
|
||||
// results in sporadic failures like this one (only if tests are running in parallel):
|
||||
|
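The RecordRequests plumbing above wraps the test server so every request is serialized to out.requests.txt before being handled. A minimal self-contained sketch of the same technique using only the standard library and net/http/httptest — the testserver package and callback names in the diff are the repo's own, and everything below is illustrative:

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "net/http/httptest"
)

// loggedRequest captures the parts of a request worth asserting on.
type loggedRequest struct {
    Method string `json:"method"`
    Path   string `json:"path"`
}

func main() {
    var recorded []string

    // Wrap the real handler so every request is logged as one JSON line
    // before being served, mirroring RecordRequestsCallback above.
    mux := http.NewServeMux()
    mux.HandleFunc("/api/2.0/preview/scim/v2/Me", func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprint(w, `{"userName":"tester@example.com"}`)
    })
    recorder := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        line, _ := json.Marshal(loggedRequest{Method: r.Method, Path: r.URL.Path})
        recorded = append(recorded, string(line))
        mux.ServeHTTP(w, r)
    })

    srv := httptest.NewServer(recorder)
    defer srv.Close()

    http.Get(srv.URL + "/api/2.0/preview/scim/v2/Me")
    fmt.Println(recorded) // [{"method":"GET","path":"/api/2.0/preview/scim/v2/Me"}]
}
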
@@ -209,9 +351,13 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
         coverDir = filepath.Join(coverDir, strings.ReplaceAll(dir, string(os.PathSeparator), "--"))
         err := os.MkdirAll(coverDir, os.ModePerm)
         require.NoError(t, err)
-        cmd.Env = append(os.Environ(), "GOCOVERDIR="+coverDir)
+        cmd.Env = append(cmd.Env, "GOCOVERDIR="+coverDir)
     }
 
+    absDir, err := filepath.Abs(dir)
+    require.NoError(t, err)
+    cmd.Env = append(cmd.Env, "TESTDIR="+absDir)
+
     // Write combined output to a file
     out, err := os.Create(filepath.Join(tmpDir, "output.txt"))
     require.NoError(t, err)

@@ -233,6 +379,7 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
 
     // Make sure there are not unaccounted for new files
     files := ListDir(t, tmpDir)
+    unexpected := []string{}
     for _, relPath := range files {
         if _, ok := inputs[relPath]; ok {
             continue

@@ -240,35 +387,47 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsContext) {
         if _, ok := outputs[relPath]; ok {
             continue
         }
+        if _, ok := Ignored[relPath]; ok {
+            continue
+        }
+        if config.CompiledIgnoreObject.MatchesPath(relPath) {
+            continue
+        }
+        unexpected = append(unexpected, relPath)
         if strings.HasPrefix(relPath, "out") {
             // We have a new file starting with "out"
             // Show the contents & support overwrite mode for it:
             doComparison(t, repls, dir, tmpDir, relPath, &printedRepls)
         }
     }
 
+    if len(unexpected) > 0 {
+        t.Error("Test produced unexpected files:\n" + strings.Join(unexpected, "\n"))
+    }
 }
 
 func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
     pathRef := filepath.Join(dirRef, relPath)
     pathNew := filepath.Join(dirNew, relPath)
-    bufRef, okRef := readIfExists(t, pathRef)
-    bufNew, okNew := readIfExists(t, pathNew)
+    bufRef, okRef := tryReading(t, pathRef)
+    bufNew, okNew := tryReading(t, pathNew)
     if !okRef && !okNew {
-        t.Errorf("Both files are missing: %s, %s", pathRef, pathNew)
+        t.Errorf("Both files are missing or have errors: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
         return
     }
 
-    valueRef := testdiff.NormalizeNewlines(string(bufRef))
-    valueNew := testdiff.NormalizeNewlines(string(bufNew))
+    valueRef := testdiff.NormalizeNewlines(bufRef)
+    valueNew := testdiff.NormalizeNewlines(bufNew)
 
     // Apply replacements to the new value only.
     // The reference value is stored after applying replacements.
-    valueNew = repls.Replace(valueNew)
+    if !NoRepl {
+        valueNew = repls.Replace(valueNew)
+    }
 
     // The test did not produce an expected output file.
     if okRef && !okNew {
         t.Errorf("Missing output file: %s", relPath)
         testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
         if testdiff.OverwriteMode {
             t.Logf("Removing output file: %s", relPath)
             require.NoError(t, os.Remove(pathRef))

|
|||
|
||||
// The test produced an unexpected output file.
|
||||
if !okRef && okNew {
|
||||
t.Errorf("Unexpected output file: %s", relPath)
|
||||
t.Errorf("Unexpected output file: %s\npathRef: %s\npathNew: %s", relPath, pathRef, pathNew)
|
||||
testdiff.AssertEqualTexts(t, pathRef, pathNew, valueRef, valueNew)
|
||||
if testdiff.OverwriteMode {
|
||||
t.Logf("Writing output file: %s", relPath)
|
||||
|
@@ -294,7 +453,7 @@ func doComparison(t *testing.T, repls testdiff.ReplacementsContext, dirRef, dirNew, relPath string, printedRepls *bool) {
         testutil.WriteFile(t, pathRef, valueNew)
     }
 
-    if !equal && printedRepls != nil && !*printedRepls {
+    if VerboseTest && !equal && printedRepls != nil && !*printedRepls {
         *printedRepls = true
         var items []string
         for _, item := range repls.Repls {

@@ -317,14 +476,14 @@ func readMergedScriptContents(t *testing.T, dir string) string {
     cleanups := []string{}
 
     for {
-        x, ok := readIfExists(t, filepath.Join(dir, CleanupScript))
+        x, ok := tryReading(t, filepath.Join(dir, CleanupScript))
         if ok {
-            cleanups = append(cleanups, string(x))
+            cleanups = append(cleanups, x)
         }
 
-        x, ok = readIfExists(t, filepath.Join(dir, PrepareScript))
+        x, ok = tryReading(t, filepath.Join(dir, PrepareScript))
         if ok {
-            prepares = append(prepares, string(x))
+            prepares = append(prepares, x)
         }
 
         if dir == "" || dir == "." {

@@ -341,13 +500,12 @@ func readMergedScriptContents(t *testing.T, dir string) string {
     return strings.Join(prepares, "\n")
 }
 
-func BuildCLI(t *testing.T, cwd, coverDir string) string {
-    execPath := filepath.Join(cwd, "build", "databricks")
+func BuildCLI(t *testing.T, buildDir, coverDir string) string {
+    execPath := filepath.Join(buildDir, "databricks")
     if runtime.GOOS == "windows" {
         execPath += ".exe"
     }
 
-    start := time.Now()
     args := []string{
         "go", "build",
         "-mod", "vendor",

@@ -365,20 +523,7 @@ func BuildCLI(t *testing.T, cwd, coverDir string) string {
         args = append(args, "-buildvcs=false")
     }
 
-    cmd := exec.Command(args[0], args[1:]...)
-    cmd.Dir = ".."
-    out, err := cmd.CombinedOutput()
-    elapsed := time.Since(start)
-    t.Logf("%s took %s", args, elapsed)
-    require.NoError(t, err, "go build failed: %s: %s\n%s", args, err, out)
-    if len(out) > 0 {
-        t.Logf("go build output: %s: %s", args, out)
-    }
-
-    // Quick check + warm up cache:
-    cmd = exec.Command(execPath, "--version")
-    out, err = cmd.CombinedOutput()
-    require.NoError(t, err, "%s --version failed: %s\n%s", execPath, err, out)
+    RunCommand(t, args, "..")
     return execPath
 }
 

@@ -411,16 +556,33 @@ func formatOutput(w io.Writer, err error) {
     }
 }
 
-func readIfExists(t *testing.T, path string) ([]byte, bool) {
-    data, err := os.ReadFile(path)
-    if err == nil {
-        return data, true
+func tryReading(t *testing.T, path string) (string, bool) {
+    info, err := os.Stat(path)
+    if err != nil {
+        if !errors.Is(err, os.ErrNotExist) {
+            t.Errorf("%s: %s", path, err)
+        }
+        return "", false
     }
 
-    if !errors.Is(err, os.ErrNotExist) {
-        t.Fatalf("%s: %s", path, err)
+    if info.Size() > MaxFileSize {
+        t.Errorf("%s: ignoring, too large: %d", path, info.Size())
+        return "", false
     }
-    return []byte{}, false
+
+    data, err := os.ReadFile(path)
+    if err != nil {
+        // already checked ErrNotExist above
+        t.Errorf("%s: %s", path, err)
+        return "", false
+    }
+
+    if !utf8.Valid(data) {
+        t.Errorf("%s: not valid utf-8", path)
+        return "", false
+    }
+
+    return string(data), true
 }
 
 func CopyDir(src, dst string, inputs, outputs map[string]bool) error {

@@ -486,3 +648,69 @@ func ListDir(t *testing.T, src string) []string {
     }
     return files
 }
+
+func getUVDefaultCacheDir(t *testing.T) string {
+    // According to uv docs https://docs.astral.sh/uv/concepts/cache/#caching-in-continuous-integration
+    // the default cache directory is
+    // "A system-appropriate cache directory, e.g., $XDG_CACHE_HOME/uv or $HOME/.cache/uv on Unix and %LOCALAPPDATA%\uv\cache on Windows"
+    cacheDir, err := os.UserCacheDir()
+    require.NoError(t, err)
+    if runtime.GOOS == "windows" {
+        return cacheDir + "\\uv\\cache"
+    } else {
+        return cacheDir + "/uv"
+    }
+}
+
+func RunCommand(t *testing.T, args []string, dir string) {
+    start := time.Now()
+    cmd := exec.Command(args[0], args[1:]...)
+    cmd.Dir = dir
+    out, err := cmd.CombinedOutput()
+    elapsed := time.Since(start)
+    t.Logf("%s took %s", args, elapsed)
+
+    require.NoError(t, err, "%s failed: %s\n%s", args, err, out)
+    if len(out) > 0 {
+        t.Logf("%s output: %s", args, out)
+    }
+}
+
+type LoggedRequest struct {
+    Headers http.Header `json:"headers,omitempty"`
+    Method  string      `json:"method"`
+    Path    string      `json:"path"`
+    Body    any         `json:"body,omitempty"`
+    RawBody string      `json:"raw_body,omitempty"`
+}
+
+func getLoggedRequest(req *testserver.Request, includedHeaders []string) LoggedRequest {
+    result := LoggedRequest{
+        Method:  req.Method,
+        Path:    req.URL.Path,
+        Headers: filterHeaders(req.Headers, includedHeaders),
+    }
+
+    if json.Valid(req.Body) {
+        result.Body = json.RawMessage(req.Body)
+    } else {
+        result.RawBody = string(req.Body)
+    }
+
+    return result
+}
+
+func filterHeaders(h http.Header, includedHeaders []string) http.Header {
+    headers := make(http.Header)
+    for k, v := range h {
+        if !slices.Contains(includedHeaders, k) {
+            continue
+        }
+        headers[k] = v
+    }
+    return headers
+}
+
+func isTruePtr(value *bool) bool {
+    return value != nil && *value
+}

@@ -0,0 +1,5 @@
+[DEFAULT]
+host = $DATABRICKS_HOST
+
+[profile_name]
+host = https://test@non-existing-subdomain.databricks.com

@@ -0,0 +1,14 @@
+bundle:
+  name: test-auth
+
+workspace:
+  host: $DATABRICKS_HOST
+
+targets:
+  dev:
+    default: true
+    workspace:
+      host: $DATABRICKS_HOST
+  prod:
+    workspace:
+      host: https://bar.com

@@ -0,0 +1,91 @@
+
+=== Inside the bundle, no flags
+>>> errcode [CLI] current-user me
+"[USERNAME]"
+
+=== Inside the bundle, target flags
+>>> errcode [CLI] current-user me -t dev
+"[USERNAME]"
+
+=== Inside the bundle, target and matching profile
+>>> errcode [CLI] current-user me -t dev -p DEFAULT
+"[USERNAME]"
+
+=== Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle.
+>>> errcode [CLI] current-user me -p profile_name
+Error: Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)
+
+Exit code: 1
+
+=== Inside the bundle, target and not matching profile
+>>> errcode [CLI] current-user me -t dev -p profile_name
+Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
+
+Exit code: 1
+
+=== Bundle commands load bundle configuration when no flags, validation OK
+>>> errcode [CLI] bundle validate
+Name: test-auth
+Target: dev
+Workspace:
+  Host: [DATABRICKS_TARGET]
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
+
+Validation OK!
+
+=== Bundle commands load bundle configuration with -t flag, validation OK
+>>> errcode [CLI] bundle validate -t dev
+Name: test-auth
+Target: dev
+Workspace:
+  Host: [DATABRICKS_TARGET]
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
+
+Validation OK!
+
+=== Bundle commands load bundle configuration with -p flag, validation not OK (profile host don't match bundle host)
+>>> errcode [CLI] bundle validate -p profile_name
+Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
+
+Name: test-auth
+Target: dev
+Workspace:
+  Host: [DATABRICKS_TARGET]
+
+Found 1 error
+
+Exit code: 1
+
+=== Bundle commands load bundle configuration with -t and -p flag, validation OK (profile host match bundle host)
+>>> errcode [CLI] bundle validate -t dev -p DEFAULT
+Name: test-auth
+Target: dev
+Workspace:
+  Host: [DATABRICKS_TARGET]
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
+
+Validation OK!
+
+=== Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host don't match bundle host)
+>>> errcode [CLI] bundle validate -t prod -p DEFAULT
+Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host [DATABRICKS_TARGET], but CLI configured to use https://bar.com
+
+Name: test-auth
+Target: prod
+Workspace:
+  Host: https://bar.com
+
+Found 1 error
+
+Exit code: 1
+
+=== Outside the bundle, no flags
+>>> errcode [CLI] current-user me
+"[USERNAME]"
+
+=== Outside the bundle, profile flag
+>>> errcode [CLI] current-user me -p profile_name
+"[USERNAME]"

@@ -0,0 +1,45 @@
+# Replace placeholder with an actual host URL
+envsubst < databricks.yml > out.yml && mv out.yml databricks.yml
+envsubst < .databrickscfg > out && mv out .databrickscfg
+export DATABRICKS_CONFIG_FILE=.databrickscfg
+
+host=$DATABRICKS_HOST
+unset DATABRICKS_HOST
+
+title "Inside the bundle, no flags"
+trace errcode $CLI current-user me | jq .userName
+
+title "Inside the bundle, target flags"
+trace errcode $CLI current-user me -t dev | jq .userName
+
+title "Inside the bundle, target and matching profile"
+trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
+
+title "Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle."
+trace errcode $CLI current-user me -p profile_name | jq .userName
+
+title "Inside the bundle, target and not matching profile"
+trace errcode $CLI current-user me -t dev -p profile_name
+
+title "Bundle commands load bundle configuration when no flags, validation OK"
+trace errcode $CLI bundle validate
+
+title "Bundle commands load bundle configuration with -t flag, validation OK"
+trace errcode $CLI bundle validate -t dev
+
+title "Bundle commands load bundle configuration with -p flag, validation not OK (profile host don't match bundle host)"
+trace errcode $CLI bundle validate -p profile_name
+
+title "Bundle commands load bundle configuration with -t and -p flag, validation OK (profile host match bundle host)"
+trace errcode $CLI bundle validate -t dev -p DEFAULT
+
+title "Bundle commands load bundle configuration with -t and -p flag, validation not OK (profile host don't match bundle host)"
+trace errcode $CLI bundle validate -t prod -p DEFAULT
+
+cd ..
+export DATABRICKS_HOST=$host
+title "Outside the bundle, no flags"
+trace errcode $CLI current-user me | jq .userName
+
+title "Outside the bundle, profile flag"
+trace errcode $CLI current-user me -p profile_name | jq .userName

@@ -0,0 +1,14 @@
+# Some of the clouds have the DATABRICKS_HOST variable set up without the https:// prefix.
+# As a result, output would be replaced with the DATABRICKS_URL variable instead of DATABRICKS_HOST.
+# As a workaround, replace both with DATABRICKS_TARGET.
+[[Repls]]
+Old='DATABRICKS_HOST'
+New='DATABRICKS_TARGET'
+
+[[Repls]]
+Old='DATABRICKS_URL'
+New='DATABRICKS_TARGET'
+
+[[Repls]]
+Old='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": .*'
+New='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)'

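Each [[Repls]] entry above is a regular-expression rewrite applied, in order, to test output before comparison (the same entries end up in repls.json for diff.py). A minimal Go sketch of that mechanism, with an assumed error string as input:

package main

import (
    "fmt"
    "regexp"
)

// replacement mirrors one [[Repls]] entry: a pattern and its substitute.
type replacement struct {
    old *regexp.Regexp
    new string
}

func main() {
    // Order matters: DATABRICKS_HOST and DATABRICKS_URL are both
    // normalized to DATABRICKS_TARGET, then the flaky error tail is redacted.
    repls := []replacement{
        {regexp.MustCompile(`DATABRICKS_HOST`), "DATABRICKS_TARGET"},
        {regexp.MustCompile(`DATABRICKS_URL`), "DATABRICKS_TARGET"},
        {regexp.MustCompile(`Get "https://non-existing-subdomain\.databricks\.com/api/2\.0/preview/scim/v2/Me": .*`),
            `Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)`},
    }

    out := `Error: Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": dial tcp: lookup failed`
    for _, r := range repls {
        out = r.old.ReplaceAllString(out, r.new)
    }
    fmt.Println(out) // Error: Get "...Me": (redacted)
}
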
@@ -0,0 +1,12 @@
+{
+  "headers": {
+    "Authorization": [
+      "Basic [ENCODED_AUTH]"
+    ],
+    "User-Agent": [
+      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/basic"
+    ]
+  },
+  "method": "GET",
+  "path": "/api/2.0/preview/scim/v2/Me"
+}

@@ -0,0 +1,4 @@
+{
+  "id":"[USERID]",
+  "userName":"[USERNAME]"
+}

@@ -0,0 +1,8 @@
+# Unset the token which is configured by default
+# in acceptance tests
+export DATABRICKS_TOKEN=""
+
+export DATABRICKS_USERNAME=username
+export DATABRICKS_PASSWORD=password
+
+$CLI current-user me

@@ -0,0 +1,4 @@
+# "username:password" in base64 is dXNlcm5hbWU6cGFzc3dvcmQ=, expect to see this in Authorization header
+[[Repls]]
+Old = "dXNlcm5hbWU6cGFzc3dvcmQ="
+New = "[ENCODED_AUTH]"

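The claim in that comment is easy to verify: HTTP basic auth places base64("username:password") in the Authorization header. A quick check:

package main

import (
    "encoding/base64"
    "fmt"
)

func main() {
    // HTTP basic auth sends base64("user:pass") in the Authorization header.
    enc := base64.StdEncoding.EncodeToString([]byte("username:password"))
    fmt.Println(enc) // dXNlcm5hbWU6cGFzc3dvcmQ=
}
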
@@ -0,0 +1,34 @@
+{
+  "headers": {
+    "User-Agent": [
+      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
+    ]
+  },
+  "method": "GET",
+  "path": "/oidc/.well-known/oauth-authorization-server"
+}
+{
+  "headers": {
+    "Authorization": [
+      "Basic [ENCODED_AUTH]"
+    ],
+    "User-Agent": [
+      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS]"
+    ]
+  },
+  "method": "POST",
+  "path": "/oidc/v1/token",
+  "raw_body": "grant_type=client_credentials\u0026scope=all-apis"
+}
+{
+  "headers": {
+    "Authorization": [
+      "Bearer oauth-token"
+    ],
+    "User-Agent": [
+      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/oauth-m2m"
+    ]
+  },
+  "method": "GET",
+  "path": "/api/2.0/preview/scim/v2/Me"
+}

@@ -0,0 +1,4 @@
+{
+  "id":"[USERID]",
+  "userName":"[USERNAME]"
+}

@@ -0,0 +1,8 @@
+# Unset the token which is configured by default
+# in acceptance tests
+export DATABRICKS_TOKEN=""
+
+export DATABRICKS_CLIENT_ID=client_id
+export DATABRICKS_CLIENT_SECRET=client_secret
+
+$CLI current-user me

@@ -0,0 +1,5 @@
+# "client_id:client_secret" in base64 is Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ=, expect to
+# see this in Authorization header
+[[Repls]]
+Old = "Y2xpZW50X2lkOmNsaWVudF9zZWNyZXQ="
+New = "[ENCODED_AUTH]"

@@ -0,0 +1,12 @@
+{
+  "headers": {
+    "Authorization": [
+      "Bearer dapi1234"
+    ],
+    "User-Agent": [
+      "cli/[DEV_VERSION] databricks-sdk-go/[SDK_VERSION] go/[GO_VERSION] os/[OS] cmd/current-user_me cmd-exec-id/[UUID] auth/pat"
+    ]
+  },
+  "method": "GET",
+  "path": "/api/2.0/preview/scim/v2/Me"
+}

@@ -0,0 +1,4 @@
+{
+  "id":"[USERID]",
+  "userName":"[USERNAME]"
+}

@@ -0,0 +1,3 @@
+export DATABRICKS_TOKEN=dapi1234
+
+$CLI current-user me

@@ -0,0 +1,18 @@
+RecordRequests = true
+IncludeRequestHeaders = ["Authorization", "User-Agent"]
+
+[[Repls]]
+Old = '(linux|darwin|windows)'
+New = '[OS]'
+
+[[Repls]]
+Old = " upstream/[A-Za-z0-9.-]+"
+New = ""
+
+[[Repls]]
+Old = " upstream-version/[A-Za-z0-9.-]+"
+New = ""
+
+[[Repls]]
+Old = " cicd/[A-Za-z0-9.-]+"
+New = ""

@@ -0,0 +1,56 @@
+#!/usr/bin/env python3
+"""This script implements "diff -r -U2 dir1 dir2" but applies replacements first"""
+
+import sys
+import difflib
+import json
+import re
+from pathlib import Path
+
+
+def replaceAll(patterns, s):
+    for comp, new in patterns:
+        s = comp.sub(new, s)
+    return s
+
+
+def main():
+    d1, d2 = sys.argv[1:]
+    d1, d2 = Path(d1), Path(d2)
+
+    with open("repls.json") as f:
+        repls = json.load(f)
+
+    patterns = []
+    for r in repls:
+        try:
+            c = re.compile(r["Old"])
+            patterns.append((c, r["New"]))
+        except re.error as e:
+            print(f"Regex error for pattern {r}: {e}", file=sys.stderr)
+
+    files1 = [str(p.relative_to(d1)) for p in d1.rglob("*") if p.is_file()]
+    files2 = [str(p.relative_to(d2)) for p in d2.rglob("*") if p.is_file()]
+
+    set1 = set(files1)
+    set2 = set(files2)
+
+    for f in sorted(set1 | set2):
+        p1 = d1 / f
+        p2 = d2 / f
+        if f not in set2:
+            print(f"Only in {d1}: {f}")
+        elif f not in set1:
+            print(f"Only in {d2}: {f}")
+        else:
+            a = replaceAll(patterns, p1.read_text()).splitlines(True)
+            b = replaceAll(patterns, p2.read_text()).splitlines(True)
+            if a != b:
+                p1_str = p1.as_posix()
+                p2_str = p2.as_posix()
+                for line in difflib.unified_diff(a, b, p1_str, p2_str, "", "", 2):
+                    print(line, end="")
+
+
+if __name__ == "__main__":
+    main()

@ -0,0 +1,36 @@
#!/usr/bin/env python3
"""
Usage: find.py <regex>
Finds all files within the current directory matching regex. The output is sorted and slashes are always forward.

If --expect N is provided, the number of matches must be N or an error is printed.
"""

import sys
import os
import re
import argparse


parser = argparse.ArgumentParser()
parser.add_argument("regex")
parser.add_argument("--expect", type=int)
args = parser.parse_args()

regex = re.compile(args.regex)
result = []

for root, dirs, files in os.walk("."):
    for filename in files:
        path = os.path.join(root, filename).lstrip("./\\").replace("\\", "/")
        if regex.search(path):
            result.append(path)

result.sort()
for item in result:
    print(item)
sys.stdout.flush()

if args.expect is not None:
    if args.expect != len(result):
        sys.exit(f"Expected {args.expect}, got {len(result)}")
@ -0,0 +1,17 @@
#!/usr/bin/env python3
"""
Cross-platform set mtime with nanosecond precision.
Usage: setmtime.py <timestamp> <filenames>
"""

import sys
import os
import datetime

timestamp = sys.argv[1]
ts, ns = timestamp.split(".")
dt = datetime.datetime.strptime(ts, "%Y-%m-%d %H:%M:%S").replace(tzinfo=datetime.timezone.utc)
ns = int(ns.ljust(9, "0"))
ts = int(dt.timestamp()) * 10**9 + ns
for filename in sys.argv[2:]:
    os.utime(filename, ns=(ts, ts))
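A worked example of the padding logic above (values are illustrative): a timestamp like "2025-01-01 12:00:00.5" splits into a seconds part and a fractional part, and the fraction is right-padded to nine digits of nanoseconds:

ns = "5".ljust(9, "0")  # -> "500000000"
print(int(ns))          # 500000000 ns, which the script adds to int(dt.timestamp()) * 10**9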
@ -0,0 +1,10 @@
#!/usr/bin/env python3
"""
Helper to sort lines in a text file. Similar to 'sort' but with no dependence on locale or presence of 'sort' in PATH.
"""

import sys

lines = sys.stdin.readlines()
lines.sort()
sys.stdout.write("".join(lines))
@ -1 +0,0 @@
databricks
@ -0,0 +1,3 @@
command:
 - python
 - app.py
@ -0,0 +1,8 @@
bundle:
  name: apps_yaml

resources:
  apps:
    myapp:
      name: myapp
      source_code_path: ./app
@ -0,0 +1,5 @@
{
  "method": "POST",
  "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml",
  "raw_body": "command:\n - python\n - app.py\n"
}
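The raw_body above is simply the YAML serialization of the app's command list; a rough Python equivalent (assuming PyYAML is available; the sequence indentation of the Go YAML encoder the CLI actually uses differs by one space):

import yaml  # PyYAML, assumed available

print(yaml.safe_dump({"command": ["python", "app.py"]}, default_flow_style=False))
# command:
# - python
# - app.py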
@ -0,0 +1,15 @@

>>> [CLI] bundle validate
Name: apps_yaml
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default

Validation OK!

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml")' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt
rm out.requests.txt
@ -0,0 +1 @@
print("Hello world!")
@ -0,0 +1,12 @@
bundle:
  name: apps_config_section

resources:
  apps:
    myapp:
      name: myapp
      source_code_path: ./app
      config:
        command:
          - python
          - app.py
@ -0,0 +1,5 @@
{
  "method": "POST",
  "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml",
  "raw_body": "command:\n - python\n - app.py\n"
}
@ -0,0 +1,23 @@

>>> [CLI] bundle validate
Warning: App config section detected

remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead

Name: apps_config_section
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default

Found 1 warning

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Warning: App config section detected

remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead

@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml")' out.requests.txt > out.app.yml.txt
rm out.requests.txt
@ -0,0 +1,26 @@
Cloud = false
RecordRequests = true

Ignore = [
  '.databricks',
]

[[Server]]
Pattern = "POST /api/2.0/apps"

[[Server]]
Pattern = "GET /api/2.0/apps/myapp"
Response.Body = '''
{
  "name": "myapp",
  "description": "",
  "compute_status": {
    "state": "ACTIVE",
    "message": "App compute is active."
  },
  "app_status": {
    "state": "RUNNING",
    "message": "Application is running."
  }
}
'''
@ -0,0 +1,54 @@
bundle:
  name: same_name_libraries

variables:
  cluster:
    default:
      spark_version: 15.4.x-scala2.12
      node_type_id: i3.xlarge
      data_security_mode: SINGLE_USER
      num_workers: 0
      spark_conf:
        spark.master: "local[*, 4]"
        spark.databricks.cluster.profile: singleNode
      custom_tags:
        ResourceClass: SingleNode

artifacts:
  whl1:
    type: whl
    path: ./whl1
  whl2:
    type: whl
    path: ./whl2

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: task1
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
            - pypi:
                package: test_package
        - task_key: task2
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl2/dist/*.whl
            - maven:
                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
        - task_key: task3
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
@ -0,0 +1,14 @@

>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Error: Duplicate local library names: my_default_python-0.0.1-py3-none-any.whl
  at resources.jobs.test.tasks[0].libraries[0].whl
     resources.jobs.test.tasks[1].libraries[0].whl
  in databricks.yml:36:15
     databricks.yml:45:15

Local library names must be unique but found libraries with the same name: whl1/dist/my_default_python-0.0.1-py3-none-any.whl, whl2/dist/my_default_python-0.0.1-py3-none-any.whl


Exit code: 1
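The error keys on wheel file basenames, not full paths: both artifacts build a my_default_python-0.0.1-py3-none-any.whl, and the later fixtures show uploads landing in a single .internal directory, so identical names would presumably collide there. A minimal sketch of such a uniqueness check (the paths are the ones from the error message):

from collections import Counter
from pathlib import Path

wheels = [
    "whl1/dist/my_default_python-0.0.1-py3-none-any.whl",
    "whl2/dist/my_default_python-0.0.1-py3-none-any.whl",
]
counts = Counter(Path(w).name for w in wheels)
duplicates = [name for name, n in counts.items() if n > 1]
print(duplicates)  # ['my_default_python-0.0.1-py3-none-any.whl'] -> deploy fails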
@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2
@ -0,0 +1,5 @@
RecordRequests = false

[[Repls]]
Old = '\\'
New = '/'
@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@ -0,0 +1 @@
print("hello")
@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@ -0,0 +1 @@
__version__ = "0.0.1"
@ -0,0 +1 @@
print("hello")
@ -0,0 +1,9 @@
export PYTHONDONTWRITEBYTECODE=1

uv venv -q --python 3.12 .venv
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
  source .venv/Scripts/activate
else
  source .venv/bin/activate
fi
uv pip install -q setuptools
@ -0,0 +1,18 @@
Cloud = false
RecordRequests = true
Ignore = [
  '.venv',
  'dist',
  'build',
  '*egg-info',
  '.databricks',
]

[[Server]]
Pattern = "GET /api/2.1/clusters/get"
Response.Body = '''
{
  "cluster_id": "0717-132531-5opeqon1",
  "spark_version": "13.3.x-scala2.12"
}
'''
@ -0,0 +1,56 @@
bundle:
  name: unique_name_libraries

variables:
  cluster:
    default:
      spark_version: 15.4.x-scala2.12
      node_type_id: i3.xlarge
      data_security_mode: SINGLE_USER
      num_workers: 0
      spark_conf:
        spark.master: "local[*, 4]"
        spark.databricks.cluster.profile: singleNode
      custom_tags:
        ResourceClass: SingleNode

artifacts:
  whl1:
    type: whl
    path: ./whl1
  whl2:
    type: whl
    path: ./whl2

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: task1
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_package
          libraries:
            - whl: ./whl1/dist/*.whl
            - whl: cowsay
            - pypi:
                package: test_package
        - task_key: task2
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_other_package
          libraries:
            - whl: ./whl2/dist/*.whl
            - whl: cowsay
            - maven:
                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
        - task_key: task3
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
@ -0,0 +1,10 @@

>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Uploading [package name]...
Uploading [package name]...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2
@ -0,0 +1,6 @@
RecordRequests = false

# The order in which files are uploaded can be different, so we just replace the name
[[Repls]]
Old="Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl"
New="Uploading [package name]"
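Concretely, this replacement maps either upload line onto one stable string, so the recorded output is independent of upload order:

import re

pattern = r"Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl"
line = "Uploading my_other_package-0.0.1-py3-none-any.whl..."
print(re.sub(pattern, "Uploading [package name]", line))
# Uploading [package name]...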
@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_package

setup(
    name="my_package",
    version=my_package.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_package/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_package.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@ -0,0 +1 @@
__version__ = "0.0.1"
@ -0,0 +1 @@
print("hello")
@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_other_package

setup(
    name="my_other_package",
    version=my_other_package.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_other_package/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_other_package.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)
@ -0,0 +1 @@
__version__ = "0.0.1"
@ -0,0 +1 @@
print("hello")
@ -0,0 +1,32 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Expecting to find no wheels
>>> errcode find.py --expect 0 whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
  {
    "existing_cluster_id": "0717-132531-5opeqon1",
    "libraries": [
      {
        "whl": "dbfs:/path/to/dist/mywheel.whl"
      }
    ],
    "python_wheel_task": {
      "entry_point": "run",
      "package_name": "my_test_code"
    },
    "task_key": "TestTask"
  }
]

=== Expecting no wheels to be uploaded
>>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl

Exit code: 1
@ -0,0 +1,12 @@
trace $CLI bundle deploy

title "Expecting to find no wheels"
trace errcode find.py --expect 0 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting no wheels to be uploaded"
trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl'

rm out.requests.txt
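The jq -s invocations above slurp out.requests.txt, which is a stream of concatenated JSON objects rather than a JSON array. A rough Python equivalent of the /jobs/create filter, assuming that file layout:

import json

dec = json.JSONDecoder()
text = open("out.requests.txt").read()
requests_log, pos = [], 0
while pos < len(text):
    obj, end = dec.raw_decode(text, pos)  # parse one JSON value from the stream
    requests_log.append(obj)
    pos = end
    while pos < len(text) and text[pos].isspace():
        pos += 1                          # skip whitespace between objects
tasks = [r["body"]["tasks"] for r in requests_log if r.get("path") == "/api/2.1/jobs/create"]
print(json.dumps(tasks, indent=2))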
@ -5,7 +5,8 @@ artifacts:
  my_test_code:
    type: whl
    path: "./my_test_code"
    build: "python3 setup.py bdist_wheel"
    # using 'python' here because 'python3' does not exist in the virtualenv on Windows
    build: python setup.py bdist_wheel

resources:
  jobs:
@ -0,0 +1,34 @@

>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
  {
    "existing_cluster_id": "0717-132531-5opeqon1",
    "libraries": [
      {
        "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
      }
    ],
    "python_wheel_task": {
      "entry_point": "run",
      "package_name": "my_test_code"
    },
    "task_key": "TestTask"
  }
]

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@ -0,0 +1,34 @@

>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
  {
    "existing_cluster_id": "0717-aaaaa-bbbbbb",
    "libraries": [
      {
        "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
      }
    ],
    "python_wheel_task": {
      "entry_point": "run",
      "package_name": "my_test_code"
    },
    "task_key": "TestTask"
  }
]

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"
@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@ -0,0 +1,46 @@

>>> [CLI] bundle deploy
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
package/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
  "deployment": {
    "kind": "BUNDLE",
    "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/state/metadata.json"
  },
  "edit_mode": "UI_LOCKED",
  "format": "MULTI_TASK",
  "max_concurrent_runs": 1,
  "name": "[default] My Wheel Job",
  "queue": {
    "enabled": true
  },
  "tasks": [
    {
      "existing_cluster_id": "0717-aaaaa-bbbbbb",
      "libraries": [
        {
          "whl": "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
        }
      ],
      "python_wheel_task": {
        "entry_point": "run",
        "package_name": "my_test_code"
      },
      "task_key": "TestTask"
    }
  ]
}

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files/package/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt