Merge branch 'main' of github.com:databricks/cli into docgen-update-new-framework

Ilya Kuznetsov 2025-03-04 12:50:54 +01:00
commit 00d2963b93
No known key found for this signature in database
GPG Key ID: 91F3DDCF5D21CDDF
336 changed files with 3937 additions and 2181 deletions


@ -1 +1 @@
c72c58f97b950fcb924a90ef164bcb10cfcd5ece
99f644e72261ef5ecf8d74db20f4b7a1e09723cc


@ -179,7 +179,7 @@ func new{{.PascalName}}() *cobra.Command {
{{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}
{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}}
{{- $hasSingleRequiredRequestBodyFieldWithPrompt := and (and $hasIdPrompt $request) (eq 1 (len $request.RequiredRequestBodyFields)) -}}
{{- $onlyPathArgsRequiredAsPositionalArguments := and $request (eq (len .RequiredPositionalArguments) (len $request.RequiredPathFields)) -}}
{{- $onlyPathArgsRequiredAsPositionalArguments := and .Request (eq (len .RequiredPositionalArguments) (len .Request.RequiredPathFields)) -}}
{{- $hasDifferentArgsWithJsonFlag := and (not $onlyPathArgsRequiredAsPositionalArguments) (and $canUseJson (or $request.HasRequiredRequestBodyFields )) -}}
{{- $hasCustomArgHandler := or $hasRequiredArgs $hasDifferentArgsWithJsonFlag -}}
@ -218,12 +218,12 @@ func new{{.PascalName}}() *cobra.Command {
cmd.Args = func(cmd *cobra.Command, args []string) error {
{{- if $hasDifferentArgsWithJsonFlag }}
if cmd.Flags().Changed("json") {
err := root.ExactArgs({{len $request.RequiredPathFields}})(cmd, args)
err := root.ExactArgs({{len .Request.RequiredPathFields}})(cmd, args)
if err != nil {
{{- if eq 0 (len $request.RequiredPathFields) }}
{{- if eq 0 (len .Request.RequiredPathFields) }}
return fmt.Errorf("when --json flag is specified, no positional arguments are required. Provide{{- range $index, $field := $request.RequiredFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
{{- else }}
return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := $request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
return fmt.Errorf("when --json flag is specified, provide only{{- range $index, $field := .Request.RequiredPathFields}}{{if $index}},{{end}} {{$field.ConstantName}}{{end}} as positional arguments. Provide{{- range $index, $field := $request.RequiredRequestBodyFields}}{{if $index}},{{end}} '{{$field.Name}}'{{end}} in your JSON input")
{{- end }}
}
return nil


@ -1,6 +1,10 @@
## Changes
<!-- Summary of your changes that are easy to understand -->
<!-- Brief summary of your changes that is easy to understand -->
## Why
<!-- Why are these changes needed? Provide the context that the reviewer might be missing.
For example, were there any decisions behind the change that are not reflected in the code itself? -->
## Tests
<!-- How is this tested? -->
<!-- How have you tested the changes? -->


@ -53,7 +53,7 @@ jobs:
go-version-file: go.mod
- name: Setup Python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
with:
python-version: '3.9'
@ -95,7 +95,7 @@ jobs:
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
uses: golangci/golangci-lint-action@2226d7cb06a077cd73e56eedd38eecad18e5d837 # v6.5.0
with:
version: v1.63.4
args: --timeout=15m


@ -54,21 +54,21 @@ jobs:
args: release --snapshot --skip docker
- name: Upload macOS binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_darwin_snapshot
path: |
dist/*_darwin_*/
- name: Upload Linux binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_linux_snapshot
path: |
dist/*_linux_*/
- name: Upload Windows binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_windows_snapshot
path: |


@ -46,7 +46,7 @@ jobs:
# QEMU is required to build cross platform docker images using buildx.
# It allows virtualization of the CPU architecture at the application level.
- name: Set up QEMU dependency
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Run GoReleaser
id: releaser

.gitignore

@ -25,6 +25,7 @@ coverage-acceptance.txt
__pycache__
*.pyc
.idea
.vscode/launch.json
.vscode/tasks.json


@ -1,5 +1,29 @@
# Version changelog
## [Release] Release v0.242.0
Notable changes:
Starting with this version, the CLI no longer loads bundle auth information when a CLI command is executed inside a bundle directory with a profile explicitly provided via the `-p` flag.
For more details, see the related GitHub issue: https://github.com/databricks/cli/issues/1358
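A minimal sketch of the new behavior (the bundle directory `my_bundle` and profile `my_profile` are hypothetical names, not from this commit):

```sh
cd my_bundle                              # a directory containing databricks.yml
databricks current-user me -p my_profile  # auth now comes from 'my_profile' in ~/.databrickscfg;
                                          # the host configured in the bundle is no longer enforced
```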
CLI:
* Do not load host from bundle for CLI commands when profile flag is used ([#2335](https://github.com/databricks/cli/pull/2335)).
* Fixed accessing required path parameters in CLI generation when the --json flag is used ([#2373](https://github.com/databricks/cli/pull/2373)).
Bundles:
* Provide instructions for testing in the default-python template ([#2355](https://github.com/databricks/cli/pull/2355)).
* Remove `run_as` from the built-in templates ([#2044](https://github.com/databricks/cli/pull/2044)).
* Change warning about incomplete permissions section into a recommendation ([#2043](https://github.com/databricks/cli/pull/2043)).
* Refine `mode: production` diagnostic output ([#2236](https://github.com/databricks/cli/pull/2236)).
* Support serverless mode in default-python template (explicit prompt) ([#2377](https://github.com/databricks/cli/pull/2377)).
* Set default data_security_mode to "SINGLE_USER" in bundle templates ([#2372](https://github.com/databricks/cli/pull/2372)).
* Fixed spark version check for clusters defined in the same bundle ([#2374](https://github.com/databricks/cli/pull/2374)).
API Changes:
* Added `databricks genie get-message-query-result-by-attachment` command.
OpenAPI commit 99f644e72261ef5ecf8d74db20f4b7a1e09723cc (2025-02-11)
## [Release] Release v0.241.2
This is a bugfix release to address an issue where jobs with tasks with a


@ -217,8 +217,12 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
}
cloudEnv := os.Getenv("CLOUD_ENV")
if config.LocalOnly && cloudEnv != "" {
t.Skipf("Disabled via LocalOnly setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
if !isTruePtr(config.Local) && cloudEnv == "" {
t.Skipf("Disabled via Local setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
}
if !isTruePtr(config.Cloud) && cloudEnv != "" {
t.Skipf("Disabled via Cloud setting in %s (CLOUD_ENV=%s)", configPath, cloudEnv)
}
var tmpDir string
@ -263,9 +267,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
databricksLocalHost := os.Getenv("DATABRICKS_DEFAULT_HOST")
if len(config.Server) > 0 || config.RecordRequests {
if len(config.Server) > 0 || isTruePtr(config.RecordRequests) {
server = testserver.New(t)
if config.RecordRequests {
if isTruePtr(config.RecordRequests) {
requestsPath := filepath.Join(tmpDir, "out.requests.txt")
server.RecordRequestsCallback = func(request *testserver.Request) {
req := getLoggedRequest(request, config.IncludeRequestHeaders)
@ -386,6 +390,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if _, ok := Ignored[relPath]; ok {
continue
}
if config.CompiledIgnoreObject.MatchesPath(relPath) {
continue
}
unexpected = append(unexpected, relPath)
if strings.HasPrefix(relPath, "out") {
// We have a new file starting with "out"
@ -703,3 +710,7 @@ func filterHeaders(h http.Header, includedHeaders []string) http.Header {
}
return headers
}
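// isTruePtr reports whether value is a non-nil pointer to true; a nil (unset) value counts as false.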
func isTruePtr(value *bool) bool {
return value != nil && *value
}


@ -11,9 +11,9 @@
>>> errcode [CLI] current-user me -t dev -p DEFAULT
"[USERNAME]"
=== Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail
=== Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle.
>>> errcode [CLI] current-user me -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Error: Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)
Exit code: 1
@ -23,6 +23,65 @@ Error: cannot resolve bundle auth configuration: config host mismatch: profile u
Exit code: 1
=== Bundle commands load bundle configuration when no flags, validation OK
>>> errcode [CLI] bundle validate
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -t flag, validation OK
>>> errcode [CLI] bundle validate -t dev
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -p flag, validation not OK (profile host doesn't match bundle host)
>>> errcode [CLI] bundle validate -p profile_name
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host https://non-existing-subdomain.databricks.com, but CLI configured to use [DATABRICKS_TARGET]
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
Found 1 error
Exit code: 1
=== Bundle commands load bundle configuration with -t and -p flags, validation OK (profile host matches bundle host)
>>> errcode [CLI] bundle validate -t dev -p DEFAULT
Name: test-auth
Target: dev
Workspace:
Host: [DATABRICKS_TARGET]
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/test-auth/dev
Validation OK!
=== Bundle commands load bundle configuration with -t and -p flags, validation not OK (profile host doesn't match bundle host)
>>> errcode [CLI] bundle validate -t prod -p DEFAULT
Error: cannot resolve bundle auth configuration: config host mismatch: profile uses host [DATABRICKS_TARGET], but CLI configured to use https://bar.com
Name: test-auth
Target: prod
Workspace:
Host: https://bar.com
Found 1 error
Exit code: 1
=== Outside the bundle, no flags
>>> errcode [CLI] current-user me
"[USERNAME]"


@ -15,12 +15,27 @@ trace errcode $CLI current-user me -t dev | jq .userName
title "Inside the bundle, target and matching profile"
trace errcode $CLI current-user me -t dev -p DEFAULT | jq .userName
title "Inside the bundle, profile flag not matching bundle host. Badness: should use profile from flag instead and not fail"
title "Inside the bundle, profile flag not matching bundle host. Should use profile from the flag and not the bundle."
trace errcode $CLI current-user me -p profile_name | jq .userName
title "Inside the bundle, target and not matching profile"
trace errcode $CLI current-user me -t dev -p profile_name
title "Bundle commands load bundle configuration when no flags, validation OK"
trace errcode $CLI bundle validate
title "Bundle commands load bundle configuration with -t flag, validation OK"
trace errcode $CLI bundle validate -t dev
title "Bundle commands load bundle configuration with -p flag, validation not OK (profile host doesn't match bundle host)"
trace errcode $CLI bundle validate -p profile_name
title "Bundle commands load bundle configuration with -t and -p flags, validation OK (profile host matches bundle host)"
trace errcode $CLI bundle validate -t dev -p DEFAULT
title "Bundle commands load bundle configuration with -t and -p flags, validation not OK (profile host doesn't match bundle host)"
trace errcode $CLI bundle validate -t prod -p DEFAULT
cd ..
export DATABRICKS_HOST=$host
title "Outside the bundle, no flags"


@ -1,5 +1,3 @@
Badness = "When -p flag is used inside the bundle folder for any CLI commands, CLI use bundle host anyway instead of profile one"
# Some of the clouds have DATABRICKS_HOST variable setup without https:// prefix
# In the result, output is replaced with DATABRICKS_URL variable instead of DATABRICKS_HOST
# This is a workaround to replace DATABRICKS_URL with DATABRICKS_HOST
@ -10,3 +8,7 @@ New='DATABRICKS_TARGET'
[[Repls]]
Old='DATABRICKS_URL'
New='DATABRICKS_TARGET'
[[Repls]]
Old='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": .*'
New='Get "https://non-existing-subdomain.databricks.com/api/2.0/preview/scim/v2/Me": (redacted)'


@ -1,5 +1,3 @@
LocalOnly = true
RecordRequests = true
IncludeRequestHeaders = ["Authorization", "User-Agent"]


@ -43,8 +43,8 @@ def main():
elif f not in set1:
print(f"Only in {d2}: {f}")
else:
a = [replaceAll(patterns, x) for x in p1.read_text().splitlines(True)]
b = [replaceAll(patterns, x) for x in p2.read_text().splitlines(True)]
a = replaceAll(patterns, p1.read_text()).splitlines(True)
b = replaceAll(patterns, p2.read_text()).splitlines(True)
if a != b:
p1_str = p1.as_posix()
p2_str = p2.as_posix()

acceptance/bin/find.py (new executable file)

@ -0,0 +1,36 @@
#!/usr/bin/env python3
"""
Usage: find.py <regex>
Finds all files within the current directory that match the regex. The output is sorted and slashes are always forward.
If --expect N is provided, the number of matches must equal N, or an error is printed.
"""
import sys
import os
import re
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("regex")
parser.add_argument("--expect", type=int)
args = parser.parse_args()
regex = re.compile(args.regex)
result = []
for root, dirs, files in os.walk("."):
for filename in files:
path = os.path.relpath(os.path.join(root, filename)).replace("\\", "/")
if regex.search(path):
result.append(path)
result.sort()
for item in result:
print(item)
sys.stdout.flush()
if args.expect is not None:
if args.expect != len(result):
sys.exit(f"Expected {args.expect}, got {len(result)}")
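A hedged usage sketch of find.py, mirroring how the acceptance scripts below invoke it (the `whl` pattern and the expected count are illustrative, not prescribed):

```sh
find.py whl              # print every path under the current directory containing 'whl', sorted
find.py --expect 2 whl   # same, but exit with an error unless exactly 2 matches are found
```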


@ -0,0 +1,3 @@
command:
- python
- app.py


@ -0,0 +1,8 @@
bundle:
name: apps_yaml
resources:
apps:
myapp:
name: myapp
source_code_path: ./app


@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}


@ -0,0 +1,15 @@
>>> [CLI] bundle validate
Name: apps_yaml
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default
Validation OK!
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!


@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml")' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt
rm out.requests.txt


@ -0,0 +1 @@
print("Hello world!")


@ -0,0 +1,12 @@
bundle:
name: apps_config_section
resources:
apps:
myapp:
name: myapp
source_code_path: ./app
config:
command:
- python
- app.py


@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}


@ -0,0 +1,23 @@
>>> [CLI] bundle validate
Warning: App config section detected
remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead
Name: apps_config_section
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default
Found 1 warning
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Warning: App config section detected
remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead


@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml")' out.requests.txt > out.app.yml.txt
rm out.requests.txt


@ -0,0 +1,26 @@
Cloud = false
RecordRequests = true
Ignore = [
'.databricks',
]
[[Server]]
Pattern = "POST /api/2.0/apps"
[[Server]]
Pattern = "GET /api/2.0/apps/myapp"
Response.Body = '''
{
"name": "myapp",
"description": "",
"compute_status": {
"state": "ACTIVE",
"message": "App compute is active."
},
"app_status": {
"state": "RUNNING",
"message": "Application is running."
}
}
'''


@ -0,0 +1,54 @@
bundle:
name: same_name_libraries
variables:
cluster:
default:
spark_version: 15.4.x-scala2.12
node_type_id: i3.xlarge
data_security_mode: SINGLE_USER
num_workers: 0
spark_conf:
spark.master: "local[*, 4]"
spark.databricks.cluster.profile: singleNode
custom_tags:
ResourceClass: SingleNode
artifacts:
whl1:
type: whl
path: ./whl1
whl2:
type: whl
path: ./whl2
resources:
jobs:
test:
name: "test"
tasks:
- task_key: task1
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl1/dist/*.whl
- pypi:
package: test_package
- task_key: task2
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl2/dist/*.whl
- maven:
coordinates: org.apache.spark:spark-sql_2.12:3.1.1
- task_key: task3
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl1/dist/*.whl


@ -0,0 +1,14 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Error: Duplicate local library names: my_default_python-0.0.1-py3-none-any.whl
at resources.jobs.test.tasks[0].libraries[0].whl
resources.jobs.test.tasks[1].libraries[0].whl
in databricks.yml:36:15
databricks.yml:45:15
Local library names must be unique but found libraries with the same name: whl1/dist/my_default_python-0.0.1-py3-none-any.whl, whl2/dist/my_default_python-0.0.1-py3-none-any.whl
Exit code: 1


@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2


@ -0,0 +1,5 @@
RecordRequests = false
[[Repls]]
Old = '\\'
New = '/'


@ -0,0 +1,28 @@
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_default_python
setup(
name="my_default_python",
version=my_default_python.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_default_python.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,28 @@
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_default_python
setup(
name="my_default_python",
version=my_default_python.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_default_python/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_default_python.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,9 @@
export PYTHONDONTWRITEBYTECODE=1
uv venv -q --python 3.12 .venv
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
source .venv/Scripts/activate
else
source .venv/bin/activate
fi
uv pip install -q setuptools


@ -0,0 +1,18 @@
Cloud = false
RecordRequests = true
Ignore = [
'.venv',
'dist',
'build',
'*egg-info',
'.databricks',
]
[[Server]]
Pattern = "GET /api/2.1/clusters/get"
Response.Body = '''
{
"cluster_id": "0717-132531-5opeqon1",
"spark_version": "13.3.x-scala2.12"
}
'''


@ -0,0 +1,56 @@
bundle:
name: unique_name_libraries
variables:
cluster:
default:
spark_version: 15.4.x-scala2.12
node_type_id: i3.xlarge
data_security_mode: SINGLE_USER
num_workers: 0
spark_conf:
spark.master: "local[*, 4]"
spark.databricks.cluster.profile: singleNode
custom_tags:
ResourceClass: SingleNode
artifacts:
whl1:
type: whl
path: ./whl1
whl2:
type: whl
path: ./whl2
resources:
jobs:
test:
name: "test"
tasks:
- task_key: task1
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_package
libraries:
- whl: ./whl1/dist/*.whl
- whl: cowsay
- pypi:
package: test_package
- task_key: task2
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_other_package
libraries:
- whl: ./whl2/dist/*.whl
- whl: cowsay
- maven:
coordinates: org.apache.spark:spark-sql_2.12:3.1.1
- task_key: task3
new_cluster: ${var.cluster}
python_wheel_task:
entry_point: main
package_name: my_default_python
libraries:
- whl: ./whl1/dist/*.whl


@ -0,0 +1,10 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Uploading [package name]...
Uploading [package name]...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!


@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2


@ -0,0 +1,6 @@
RecordRequests = false
# The order in which files are uploaded can be different, so we just replace the name
[[Repls]]
Old="Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl"
New="Uploading [package name]"


@ -0,0 +1,28 @@
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_package
setup(
name="my_package",
version=my_package.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_package/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_package.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,28 @@
from setuptools import setup, find_packages
import sys
sys.path.append("./src")
import my_other_package
setup(
name="my_other_package",
version=my_other_package.__version__,
url="https://databricks.com",
author="[USERNAME]",
description="wheel file based on my_other_package/src",
packages=find_packages(where="./src"),
package_dir={"": "src"},
entry_points={
"packages": [
"main=my_other_package.main:main",
],
},
install_requires=[
# Dependencies in case the output wheel file is used as a library dependency.
# For defining dependencies, when this package is used in Databricks, see:
# https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
"setuptools"
],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1,32 @@
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
=== Expecting to find no wheels
>>> errcode find.py --expect 0 whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "dbfs:/path/to/dist/mywheel.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting no wheels to be uploaded
>>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl
Exit code: 1


@ -0,0 +1,12 @@
trace $CLI bundle deploy
title "Expecting to find no wheels"
trace errcode find.py --expect 0 whl
title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting no wheels to be uploaded"
trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl'
rm out.requests.txt


@ -5,7 +5,8 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
# using 'python' here because 'python3' does not exist in the virtualenv on Windows
build: python setup.py bdist_wheel
resources:
jobs:


@ -0,0 +1,34 @@
>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy
trace find.py --expect 1 whl
title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt


@ -0,0 +1,34 @@
>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy
trace find.py --expect 1 whl
title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt


@ -0,0 +1,46 @@
>>> [CLI] bundle deploy
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
package/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "[default] My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files/package/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy
trace find.py --expect 1 whl
title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt
title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt


@ -0,0 +1,33 @@
>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"notebook_task": {
"notebook_path": "/notebook.py"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy
trace find.py --expect 1 whl
title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt


@ -5,11 +5,11 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
build: "python setup.py bdist_wheel"
my_test_code_2:
type: whl
path: "./my_test_code"
build: "python3 setup2.py bdist_wheel"
build: "python setup2.py bdist_wheel"
resources:
jobs:


@ -0,0 +1,42 @@
>>> [CLI] bundle deploy
Building my_test_code...
Building my_test_code_2...
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading my_test_code_2-0.0.1-py3-none-any.whl...
>>> find.py --expect 2 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl
=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' messages change order
trace find.py --expect 2 whl
title "Expecting 2 wheels in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting 2 wheels to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm -fr out.requests.txt


@ -13,4 +13,4 @@ resources:
entry_point: "run"
libraries:
- whl: ./dist/*.whl
- whl: ./dist/lib/my_test_code-0.0.1-py3-none-any.whl
- whl: ./dist/lib/other_test_code-0.0.1-py3-none-any.whl


@ -0,0 +1,45 @@
>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl
>>> [CLI] bundle deploy
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading other_test_code-0.0.1-py3-none-any.whl...
=== Expecting to find 2 wheels, same as initially provided
>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,14 @@
trace find.py --expect 2 whl
trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' messages change order
title "Expecting to find 2 wheels, same as initially provided"
trace find.py --expect 2 whl
title "Expecting 2 wheels in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt
title "Expecting 2 wheels to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt


@ -5,7 +5,7 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
build: python setup.py bdist_wheel
resources:
jobs:


@ -0,0 +1,54 @@
>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/environment_key/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in environments section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/environment_key/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"environments": [
{
"environment_key": "test_env",
"spec": {
"client": "1",
"dependencies": [
"/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
]
}
}
],
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"environment_key": "test_env",
"existing_cluster_id": "0717-132531-5opeqon1",
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"


@ -0,0 +1,11 @@
trace $CLI bundle deploy
trace find.py --expect 1 whl
title "Expecting 1 wheel in environments section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt
title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort
rm out.requests.txt

Some files were not shown because too many files have changed in this diff.