diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 5be087016..2f8597999 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,6 +1,10 @@ ## Changes - + + +## Why + ## Tests - + diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index c41afc18c..cbae43f2f 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -53,7 +53,7 @@ jobs: go-version-file: go.mod - name: Setup Python - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 + uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0 with: python-version: '3.9' @@ -95,7 +95,7 @@ jobs: # Exit with status code 1 if there are differences (i.e. unformatted files) git diff --exit-code - name: golangci-lint - uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0 + uses: golangci/golangci-lint-action@2226d7cb06a077cd73e56eedd38eecad18e5d837 # v6.5.0 with: version: v1.63.4 args: --timeout=15m diff --git a/.github/workflows/release-snapshot.yml b/.github/workflows/release-snapshot.yml index 8b4684eab..e37de8920 100644 --- a/.github/workflows/release-snapshot.yml +++ b/.github/workflows/release-snapshot.yml @@ -54,21 +54,21 @@ jobs: args: release --snapshot --skip docker - name: Upload macOS binaries - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: cli_darwin_snapshot path: | dist/*_darwin_*/ - name: Upload Linux binaries - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: cli_linux_snapshot path: | dist/*_linux_*/ - name: Upload Windows binaries - uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 + uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1 with: name: cli_windows_snapshot path: | diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index fe5b4170b..81810c606 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -46,7 +46,7 @@ jobs: # QEMU is required to build cross platform docker images using buildx. # It allows virtualization of the CPU architecture at the application level. - name: Set up QEMU dependency - uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 + uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0 - name: Run GoReleaser id: releaser diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go index 066a84299..afdc42abc 100644 --- a/acceptance/acceptance_test.go +++ b/acceptance/acceptance_test.go @@ -390,6 +390,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont if _, ok := Ignored[relPath]; ok { continue } + if config.CompiledIgnoreObject.MatchesPath(relPath) { + continue + } unexpected = append(unexpected, relPath) if strings.HasPrefix(relPath, "out") { // We have a new file starting with "out" diff --git a/acceptance/bin/find.py b/acceptance/bin/find.py new file mode 100755 index 000000000..d122404b2 --- /dev/null +++ b/acceptance/bin/find.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python3 +""" +Usage: find.py +Finds all files within current directory matching regex. The output is sorted and slashes are always forward. 
+ +If --expect N is provided, the number of matches must be N or error is printed. +""" + +import sys +import os +import re +import argparse + + +parser = argparse.ArgumentParser() +parser.add_argument("regex") +parser.add_argument("--expect", type=int) +args = parser.parse_args() + +regex = re.compile(args.regex) +result = [] + +for root, dirs, files in os.walk("."): + for filename in files: + path = os.path.join(root, filename).lstrip("./\\").replace("\\", "/") + if regex.search(path): + result.append(path) + +result.sort() +for item in result: + print(item) +sys.stdout.flush() + +if args.expect is not None: + if args.expect != len(result): + sys.exit(f"Expected {args.expect}, got {len(result)}") diff --git a/acceptance/bundle/apps/app_yaml/app/app.py b/acceptance/bundle/apps/app_yaml/app/app.py new file mode 100644 index 000000000..e69de29bb diff --git a/acceptance/bundle/apps/app_yaml/app/app.yml b/acceptance/bundle/apps/app_yaml/app/app.yml new file mode 100644 index 000000000..61471358d --- /dev/null +++ b/acceptance/bundle/apps/app_yaml/app/app.yml @@ -0,0 +1,3 @@ +command: + - python + - app.py diff --git a/acceptance/bundle/apps/app_yaml/databricks.yml b/acceptance/bundle/apps/app_yaml/databricks.yml new file mode 100644 index 000000000..0064e6c6b --- /dev/null +++ b/acceptance/bundle/apps/app_yaml/databricks.yml @@ -0,0 +1,8 @@ +bundle: + name: apps_yaml + +resources: + apps: + myapp: + name: myapp + source_code_path: ./app diff --git a/acceptance/bundle/apps/app_yaml/out.app.yml.txt b/acceptance/bundle/apps/app_yaml/out.app.yml.txt new file mode 100644 index 000000000..eccd4eb13 --- /dev/null +++ b/acceptance/bundle/apps/app_yaml/out.app.yml.txt @@ -0,0 +1,5 @@ +{ + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml", + "raw_body": "command:\n - python\n - app.py\n" +} diff --git a/acceptance/bundle/apps/app_yaml/output.txt b/acceptance/bundle/apps/app_yaml/output.txt new file mode 100644 index 000000000..2a946e5ee --- /dev/null +++ b/acceptance/bundle/apps/app_yaml/output.txt @@ -0,0 +1,15 @@ + +>>> [CLI] bundle validate +Name: apps_yaml +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default + +Validation OK! + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
diff --git a/acceptance/bundle/apps/app_yaml/script b/acceptance/bundle/apps/app_yaml/script new file mode 100644 index 000000000..8cb34c62d --- /dev/null +++ b/acceptance/bundle/apps/app_yaml/script @@ -0,0 +1,4 @@ +trace $CLI bundle validate +trace $CLI bundle deploy +jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml")' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt +rm out.requests.txt diff --git a/acceptance/bundle/apps/config_section/app/app.py b/acceptance/bundle/apps/config_section/app/app.py new file mode 100644 index 000000000..f1a18139c --- /dev/null +++ b/acceptance/bundle/apps/config_section/app/app.py @@ -0,0 +1 @@ +print("Hello world!") diff --git a/acceptance/bundle/apps/config_section/databricks.yml b/acceptance/bundle/apps/config_section/databricks.yml new file mode 100644 index 000000000..25ab2f261 --- /dev/null +++ b/acceptance/bundle/apps/config_section/databricks.yml @@ -0,0 +1,12 @@ +bundle: + name: apps_config_section + +resources: + apps: + myapp: + name: myapp + source_code_path: ./app + config: + command: + - python + - app.py diff --git a/acceptance/bundle/apps/config_section/out.app.yml.txt b/acceptance/bundle/apps/config_section/out.app.yml.txt new file mode 100644 index 000000000..a3e69351b --- /dev/null +++ b/acceptance/bundle/apps/config_section/out.app.yml.txt @@ -0,0 +1,5 @@ +{ + "method": "POST", + "path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml", + "raw_body": "command:\n - python\n - app.py\n" +} diff --git a/acceptance/bundle/apps/config_section/output.txt b/acceptance/bundle/apps/config_section/output.txt new file mode 100644 index 000000000..800945278 --- /dev/null +++ b/acceptance/bundle/apps/config_section/output.txt @@ -0,0 +1,23 @@ + +>>> [CLI] bundle validate +Warning: App config section detected + +remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead + +Name: apps_config_section +Target: default +Workspace: + User: [USERNAME] + Path: /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default + +Found 1 warning + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+Warning: App config section detected + +remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead + diff --git a/acceptance/bundle/apps/config_section/script b/acceptance/bundle/apps/config_section/script new file mode 100644 index 000000000..5769918be --- /dev/null +++ b/acceptance/bundle/apps/config_section/script @@ -0,0 +1,4 @@ +trace $CLI bundle validate +trace $CLI bundle deploy +jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml")' out.requests.txt > out.app.yml.txt +rm out.requests.txt diff --git a/acceptance/bundle/apps/test.toml b/acceptance/bundle/apps/test.toml new file mode 100644 index 000000000..6999f2089 --- /dev/null +++ b/acceptance/bundle/apps/test.toml @@ -0,0 +1,26 @@ +Cloud = false +RecordRequests = true + +Ignore = [ + '.databricks', +] + +[[Server]] +Pattern = "POST /api/2.0/apps" + +[[Server]] +Pattern = "GET /api/2.0/apps/myapp" +Response.Body = ''' +{ + "name": "myapp", + "description": "", + "compute_status": { + "state": "ACTIVE", + "message": "App compute is active." + }, + "app_status": { + "state": "RUNNING", + "message": "Application is running." + } +} +''' diff --git a/acceptance/bundle/artifacts/same_name_libraries/test.toml b/acceptance/bundle/artifacts/same_name_libraries/test.toml index a298217f2..a17f2659f 100644 --- a/acceptance/bundle/artifacts/same_name_libraries/test.toml +++ b/acceptance/bundle/artifacts/same_name_libraries/test.toml @@ -1,3 +1,5 @@ +RecordRequests = false + [[Repls]] Old = '\\' New = '/' diff --git a/acceptance/bundle/artifacts/script.prepare b/acceptance/bundle/artifacts/script.prepare new file mode 100644 index 000000000..673b20af9 --- /dev/null +++ b/acceptance/bundle/artifacts/script.prepare @@ -0,0 +1,9 @@ +export PYTHONDONTWRITEBYTECODE=1 + +uv venv -q --python 3.12 .venv +if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then + source .venv/Scripts/activate +else + source .venv/bin/activate +fi +uv pip install -q setuptools diff --git a/acceptance/bundle/artifacts/test.toml b/acceptance/bundle/artifacts/test.toml new file mode 100644 index 000000000..42432706e --- /dev/null +++ b/acceptance/bundle/artifacts/test.toml @@ -0,0 +1,18 @@ +Cloud = false +RecordRequests = true +Ignore = [ + '.venv', + 'dist', + 'build', + '*egg-info', + '.databricks', +] + +[[Server]] +Pattern = "GET /api/2.1/clusters/get" +Response.Body = ''' +{ + "cluster_id": "0717-132531-5opeqon1", + "spark_version": "13.3.x-scala2.12" +} +''' diff --git a/acceptance/bundle/artifacts/unique_name_libraries/output.txt b/acceptance/bundle/artifacts/unique_name_libraries/output.txt index ecc7bf57b..634574bf5 100644 --- a/acceptance/bundle/artifacts/unique_name_libraries/output.txt +++ b/acceptance/bundle/artifacts/unique_name_libraries/output.txt @@ -2,8 +2,8 @@ >>> errcode [CLI] bundle deploy Building whl1... Building whl2... -Uploading [package name] -Uploading [package name] +Uploading [package name]... +Uploading [package name]... Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files... Deploying resources... Updating deployment state... 
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/test.toml b/acceptance/bundle/artifacts/unique_name_libraries/test.toml index 280338bd6..7956c1909 100644 --- a/acceptance/bundle/artifacts/unique_name_libraries/test.toml +++ b/acceptance/bundle/artifacts/unique_name_libraries/test.toml @@ -1,6 +1,6 @@ -Cloud = false +RecordRequests = false # The order in which files are uploaded can be different, so we just replace the name [[Repls]] -Old="Uploading .*-0.0.1-py3-none-any.whl..." +Old="Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl" New="Uploading [package name]" diff --git a/bundle/tests/python_wheel/python_wheel_dbfs_lib/bundle.yml b/acceptance/bundle/artifacts/whl_dbfs/databricks.yml similarity index 100% rename from bundle/tests/python_wheel/python_wheel_dbfs_lib/bundle.yml rename to acceptance/bundle/artifacts/whl_dbfs/databricks.yml diff --git a/acceptance/bundle/artifacts/whl_dbfs/output.txt b/acceptance/bundle/artifacts/whl_dbfs/output.txt new file mode 100644 index 000000000..f0615c558 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dbfs/output.txt @@ -0,0 +1,32 @@ + +>>> [CLI] bundle deploy +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +=== Expecting to find no wheels +>>> errcode find.py --expect 0 whl + +=== Expecting 1 wheel in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "dbfs:/path/to/dist/mywheel.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } +] + +=== Expecting no wheels to be uploaded +>>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl + +Exit code: 1 diff --git a/acceptance/bundle/artifacts/whl_dbfs/script b/acceptance/bundle/artifacts/whl_dbfs/script new file mode 100644 index 000000000..d3b12cb8e --- /dev/null +++ b/acceptance/bundle/artifacts/whl_dbfs/script @@ -0,0 +1,12 @@ +trace $CLI bundle deploy + +title "Expecting to find no wheels" +trace errcode find.py --expect 0 whl + +title "Expecting 1 wheel in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting no wheels to be uploaded" +trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl' + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/environment_key/.gitignore b/acceptance/bundle/artifacts/whl_explicit/.gitignore similarity index 100% rename from bundle/tests/python_wheel/environment_key/.gitignore rename to acceptance/bundle/artifacts/whl_explicit/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel/bundle.yml b/acceptance/bundle/artifacts/whl_explicit/databricks.yml similarity index 77% rename from bundle/tests/python_wheel/python_wheel/bundle.yml rename to acceptance/bundle/artifacts/whl_explicit/databricks.yml index 017fe1c43..45106c0b0 100644 --- a/bundle/tests/python_wheel/python_wheel/bundle.yml +++ b/acceptance/bundle/artifacts/whl_explicit/databricks.yml @@ -5,7 +5,8 @@ artifacts: my_test_code: type: whl path: "./my_test_code" - build: "python3 setup.py bdist_wheel" + # using 'python' there because 'python3' does not exist in virtualenv on windows + build: python setup.py bdist_wheel resources: jobs: diff --git 
a/bundle/tests/python_wheel/environment_key/my_test_code/setup.py b/acceptance/bundle/artifacts/whl_explicit/my_test_code/setup.py similarity index 100% rename from bundle/tests/python_wheel/environment_key/my_test_code/setup.py rename to acceptance/bundle/artifacts/whl_explicit/my_test_code/setup.py diff --git a/bundle/tests/python_wheel/environment_key/my_test_code/src/__init__.py b/acceptance/bundle/artifacts/whl_explicit/my_test_code/src/__init__.py similarity index 100% rename from bundle/tests/python_wheel/environment_key/my_test_code/src/__init__.py rename to acceptance/bundle/artifacts/whl_explicit/my_test_code/src/__init__.py diff --git a/bundle/tests/python_wheel/environment_key/my_test_code/src/__main__.py b/acceptance/bundle/artifacts/whl_explicit/my_test_code/src/__main__.py similarity index 100% rename from bundle/tests/python_wheel/environment_key/my_test_code/src/__main__.py rename to acceptance/bundle/artifacts/whl_explicit/my_test_code/src/__main__.py diff --git a/acceptance/bundle/artifacts/whl_explicit/output.txt b/acceptance/bundle/artifacts/whl_explicit/output.txt new file mode 100644 index 000000000..1018501db --- /dev/null +++ b/acceptance/bundle/artifacts/whl_explicit/output.txt @@ -0,0 +1,34 @@ + +>>> [CLI] bundle deploy +Building my_test_code... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> find.py --expect 1 whl +my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 1 wheel in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } +] + +=== Expecting 1 wheel to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_explicit/script b/acceptance/bundle/artifacts/whl_explicit/script new file mode 100644 index 000000000..bb7e26ae1 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_explicit/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy + +trace find.py --expect 1 whl + +title "Expecting 1 wheel in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting 1 wheel to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel/.gitignore b/acceptance/bundle/artifacts/whl_implicit/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel/.gitignore rename to acceptance/bundle/artifacts/whl_implicit/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact/bundle.yml b/acceptance/bundle/artifacts/whl_implicit/databricks.yml similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact/bundle.yml rename to 
acceptance/bundle/artifacts/whl_implicit/databricks.yml diff --git a/bundle/tests/python_wheel/python_wheel/my_test_code/src/__init__.py b/acceptance/bundle/artifacts/whl_implicit/my_test_code/__init__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel/my_test_code/src/__init__.py rename to acceptance/bundle/artifacts/whl_implicit/my_test_code/__init__.py diff --git a/bundle/tests/python_wheel/python_wheel/my_test_code/src/__main__.py b/acceptance/bundle/artifacts/whl_implicit/my_test_code/__main__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel/my_test_code/src/__main__.py rename to acceptance/bundle/artifacts/whl_implicit/my_test_code/__main__.py diff --git a/acceptance/bundle/artifacts/whl_implicit/output.txt b/acceptance/bundle/artifacts/whl_implicit/output.txt new file mode 100644 index 000000000..69ff56c42 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit/output.txt @@ -0,0 +1,34 @@ + +>>> [CLI] bundle deploy +Building python_artifact... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> find.py --expect 1 whl +dist/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 1 wheels in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-aaaaa-bbbbbb", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } +] + +=== Expecting 1 wheels to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_implicit/script b/acceptance/bundle/artifacts/whl_implicit/script new file mode 100644 index 000000000..78c4d75e0 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy + +trace find.py --expect 1 whl + +title "Expecting 1 wheels in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting 1 wheels to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact/setup.py b/acceptance/bundle/artifacts/whl_implicit/setup.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact/setup.py rename to acceptance/bundle/artifacts/whl_implicit/setup.py diff --git a/bundle/tests/python_wheel/python_wheel_multiple/.gitignore b/acceptance/bundle/artifacts/whl_implicit_custom_path/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel_multiple/.gitignore rename to acceptance/bundle/artifacts/whl_implicit_custom_path/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/bundle.yml b/acceptance/bundle/artifacts/whl_implicit_custom_path/databricks.yml similarity index 100% rename from 
bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/bundle.yml rename to acceptance/bundle/artifacts/whl_implicit_custom_path/databricks.yml diff --git a/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt b/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt new file mode 100644 index 000000000..0658dce3a --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit_custom_path/output.txt @@ -0,0 +1,46 @@ + +>>> [CLI] bundle deploy +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> find.py --expect 1 whl +package/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 1 wheel in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt +{ + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "[default] My Wheel Job", + "queue": { + "enabled": true + }, + "tasks": [ + { + "existing_cluster_id": "0717-aaaaa-bbbbbb", + "libraries": [ + { + "whl": "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } + ] +} + +=== Expecting 1 wheel to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files/package/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl" diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl b/acceptance/bundle/artifacts/whl_implicit_custom_path/package/my_test_code-0.0.1-py3-none-any.whl similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl rename to acceptance/bundle/artifacts/whl_implicit_custom_path/package/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_implicit_custom_path/script b/acceptance/bundle/artifacts/whl_implicit_custom_path/script new file mode 100644 index 000000000..d4c2438db --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit_custom_path/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy + +trace find.py --expect 1 whl + +title "Expecting 1 wheel in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt + +title "Expecting 1 wheel to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact/.gitignore b/acceptance/bundle/artifacts/whl_implicit_notebook/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact/.gitignore rename to acceptance/bundle/artifacts/whl_implicit_notebook/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/bundle.yml b/acceptance/bundle/artifacts/whl_implicit_notebook/databricks.yml similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/bundle.yml rename to acceptance/bundle/artifacts/whl_implicit_notebook/databricks.yml diff --git 
a/bundle/tests/python_wheel/python_wheel_multiple/my_test_code/src/__init__.py b/acceptance/bundle/artifacts/whl_implicit_notebook/my_test_code/__init__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_multiple/my_test_code/src/__init__.py rename to acceptance/bundle/artifacts/whl_implicit_notebook/my_test_code/__init__.py diff --git a/bundle/tests/python_wheel/python_wheel_multiple/my_test_code/src/__main__.py b/acceptance/bundle/artifacts/whl_implicit_notebook/my_test_code/__main__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_multiple/my_test_code/src/__main__.py rename to acceptance/bundle/artifacts/whl_implicit_notebook/my_test_code/__main__.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/notebook.py b/acceptance/bundle/artifacts/whl_implicit_notebook/notebook.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/notebook.py rename to acceptance/bundle/artifacts/whl_implicit_notebook/notebook.py diff --git a/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt b/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt new file mode 100644 index 000000000..9c7296b9a --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit_notebook/output.txt @@ -0,0 +1,33 @@ + +>>> [CLI] bundle deploy +Building python_artifact... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! + +>>> find.py --expect 1 whl +dist/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 1 wheel in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-aaaaa-bbbbbb", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + } + ], + "notebook_task": { + "notebook_path": "/notebook.py" + }, + "task_key": "TestTask" + } +] + +=== Expecting 1 wheel to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_implicit_notebook/script b/acceptance/bundle/artifacts/whl_implicit_notebook/script new file mode 100644 index 000000000..bb7e26ae1 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_implicit_notebook/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy + +trace find.py --expect 1 whl + +title "Expecting 1 wheel in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting 1 wheel to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/setup.py b/acceptance/bundle/artifacts/whl_implicit_notebook/setup.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/setup.py rename to acceptance/bundle/artifacts/whl_implicit_notebook/setup.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/.gitignore 
b/acceptance/bundle/artifacts/whl_multiple/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_no_setup/.gitignore rename to acceptance/bundle/artifacts/whl_multiple/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel_multiple/bundle.yml b/acceptance/bundle/artifacts/whl_multiple/databricks.yml similarity index 85% rename from bundle/tests/python_wheel/python_wheel_multiple/bundle.yml rename to acceptance/bundle/artifacts/whl_multiple/databricks.yml index 770110416..2bfd85abd 100644 --- a/bundle/tests/python_wheel/python_wheel_multiple/bundle.yml +++ b/acceptance/bundle/artifacts/whl_multiple/databricks.yml @@ -5,11 +5,11 @@ artifacts: my_test_code: type: whl path: "./my_test_code" - build: "python3 setup.py bdist_wheel" + build: "python setup.py bdist_wheel" my_test_code_2: type: whl path: "./my_test_code" - build: "python3 setup2.py bdist_wheel" + build: "python setup2.py bdist_wheel" resources: jobs: diff --git a/bundle/tests/python_wheel/python_wheel/my_test_code/setup.py b/acceptance/bundle/artifacts/whl_multiple/my_test_code/setup.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel/my_test_code/setup.py rename to acceptance/bundle/artifacts/whl_multiple/my_test_code/setup.py diff --git a/bundle/tests/python_wheel/python_wheel_multiple/my_test_code/setup2.py b/acceptance/bundle/artifacts/whl_multiple/my_test_code/setup2.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_multiple/my_test_code/setup2.py rename to acceptance/bundle/artifacts/whl_multiple/my_test_code/setup2.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__init__.py b/acceptance/bundle/artifacts/whl_multiple/my_test_code/src/__init__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__init__.py rename to acceptance/bundle/artifacts/whl_multiple/my_test_code/src/__init__.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__main__.py b/acceptance/bundle/artifacts/whl_multiple/my_test_code/src/__main__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact/my_test_code/__main__.py rename to acceptance/bundle/artifacts/whl_multiple/my_test_code/src/__main__.py diff --git a/acceptance/bundle/artifacts/whl_multiple/output.txt b/acceptance/bundle/artifacts/whl_multiple/output.txt new file mode 100644 index 000000000..9335b9cc5 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_multiple/output.txt @@ -0,0 +1,42 @@ + +>>> [CLI] bundle deploy +Building my_test_code... +Building my_test_code_2... +Deploying resources... +Deployment complete! +Updating deployment state... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading my_test_code_2-0.0.1-py3-none-any.whl... 
+ +>>> find.py --expect 2 whl +my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl +my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl + +=== Expecting 2 wheels in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + }, + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } +] + +=== Expecting 2 wheels to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_multiple/script b/acceptance/bundle/artifacts/whl_multiple/script new file mode 100644 index 000000000..a475e9f73 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_multiple/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' messages change order + +trace find.py --expect 2 whl + +title "Expecting 2 wheels in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting 2 wheels to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm -fr out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/.gitignore b/acceptance/bundle/artifacts/whl_prebuilt_multiple/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/.gitignore rename to acceptance/bundle/artifacts/whl_prebuilt_multiple/.gitignore diff --git a/bundle/tests/python_wheel/python_wheel_no_build/bundle.yml b/acceptance/bundle/artifacts/whl_prebuilt_multiple/databricks.yml similarity index 84% rename from bundle/tests/python_wheel/python_wheel_no_build/bundle.yml rename to acceptance/bundle/artifacts/whl_prebuilt_multiple/databricks.yml index e10e3993d..4ad0c6afa 100644 --- a/bundle/tests/python_wheel/python_wheel_no_build/bundle.yml +++ b/acceptance/bundle/artifacts/whl_prebuilt_multiple/databricks.yml @@ -13,4 +13,4 @@ resources: entry_point: "run" libraries: - whl: ./dist/*.whl - - whl: ./dist/lib/my_test_code-0.0.1-py3-none-any.whl + - whl: ./dist/lib/other_test_code-0.0.1-py3-none-any.whl diff --git a/bundle/tests/python_wheel/python_wheel_no_build/dist/lib/my_test_code-0.0.1-py3-none-any.whl b/acceptance/bundle/artifacts/whl_prebuilt_multiple/dist/lib/other_test_code-0.0.1-py3-none-any.whl similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_build/dist/lib/my_test_code-0.0.1-py3-none-any.whl rename to acceptance/bundle/artifacts/whl_prebuilt_multiple/dist/lib/other_test_code-0.0.1-py3-none-any.whl diff --git 
a/bundle/tests/python_wheel/python_wheel_no_build/dist/my_test_code-0.0.1-py3-none-any.whl b/acceptance/bundle/artifacts/whl_prebuilt_multiple/dist/my_test_code-0.0.1-py3-none-any.whl similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_build/dist/my_test_code-0.0.1-py3-none-any.whl rename to acceptance/bundle/artifacts/whl_prebuilt_multiple/dist/my_test_code-0.0.1-py3-none-any.whl diff --git a/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt b/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt new file mode 100644 index 000000000..c9cd82fb8 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_prebuilt_multiple/output.txt @@ -0,0 +1,45 @@ + +>>> find.py --expect 2 whl +dist/lib/other_test_code-0.0.1-py3-none-any.whl +dist/my_test_code-0.0.1-py3-none-any.whl + +>>> [CLI] bundle deploy +Deploying resources... +Deployment complete! +Updating deployment state... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading other_test_code-0.0.1-py3-none-any.whl... + +=== Expecting to find 2 wheels, same as initially provided +>>> find.py --expect 2 whl +dist/lib/other_test_code-0.0.1-py3-none-any.whl +dist/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 2 wheels in libraries section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt +[ + { + "existing_cluster_id": "0717-132531-5opeqon1", + "libraries": [ + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + }, + { + "whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl" + } + ], + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } +] + +=== Expecting 2 wheels to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_prebuilt_multiple/script b/acceptance/bundle/artifacts/whl_prebuilt_multiple/script new file mode 100644 index 000000000..5265151e2 --- /dev/null +++ b/acceptance/bundle/artifacts/whl_prebuilt_multiple/script @@ -0,0 +1,14 @@ +trace find.py --expect 2 whl + +trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' 
messages change order + +title "Expecting to find 2 wheels, same as initially provided" +trace find.py --expect 2 whl + +title "Expecting 2 wheels in libraries section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt + +title "Expecting 2 wheels to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/bundle/tests/python_wheel/python_wheel_no_build/.gitignore b/acceptance/bundle/artifacts/whl_via_environment_key/.gitignore similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_build/.gitignore rename to acceptance/bundle/artifacts/whl_via_environment_key/.gitignore diff --git a/bundle/tests/python_wheel/environment_key/databricks.yml b/acceptance/bundle/artifacts/whl_via_environment_key/databricks.yml similarity index 92% rename from bundle/tests/python_wheel/environment_key/databricks.yml rename to acceptance/bundle/artifacts/whl_via_environment_key/databricks.yml index 198f8c0d2..4ca3f9113 100644 --- a/bundle/tests/python_wheel/environment_key/databricks.yml +++ b/acceptance/bundle/artifacts/whl_via_environment_key/databricks.yml @@ -5,7 +5,7 @@ artifacts: my_test_code: type: whl path: "./my_test_code" - build: "python3 setup.py bdist_wheel" + build: python setup.py bdist_wheel resources: jobs: diff --git a/bundle/tests/python_wheel/python_wheel_multiple/my_test_code/setup.py b/acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/setup.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_multiple/my_test_code/setup.py rename to acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/setup.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/my_test_code/__init__.py b/acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/src/__init__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/my_test_code/__init__.py rename to acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/src/__init__.py diff --git a/bundle/tests/python_wheel/python_wheel_no_artifact_notebook/my_test_code/__main__.py b/acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/src/__main__.py similarity index 100% rename from bundle/tests/python_wheel/python_wheel_no_artifact_notebook/my_test_code/__main__.py rename to acceptance/bundle/artifacts/whl_via_environment_key/my_test_code/src/__main__.py diff --git a/acceptance/bundle/artifacts/whl_via_environment_key/output.txt b/acceptance/bundle/artifacts/whl_via_environment_key/output.txt new file mode 100644 index 000000000..8b6b781aa --- /dev/null +++ b/acceptance/bundle/artifacts/whl_via_environment_key/output.txt @@ -0,0 +1,54 @@ + +>>> [CLI] bundle deploy +Building my_test_code... +Uploading my_test_code-0.0.1-py3-none-any.whl... +Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/environment_key/default/files... +Deploying resources... +Updating deployment state... +Deployment complete! 
+ +>>> find.py --expect 1 whl +my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl + +=== Expecting 1 wheel in environments section in /jobs/create +>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt +{ + "deployment": { + "kind": "BUNDLE", + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/environment_key/default/state/metadata.json" + }, + "edit_mode": "UI_LOCKED", + "environments": [ + { + "environment_key": "test_env", + "spec": { + "client": "1", + "dependencies": [ + "/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" + ] + } + } + ], + "format": "MULTI_TASK", + "max_concurrent_runs": 1, + "name": "My Wheel Job", + "queue": { + "enabled": true + }, + "tasks": [ + { + "environment_key": "test_env", + "existing_cluster_id": "0717-132531-5opeqon1", + "python_wheel_task": { + "entry_point": "run", + "package_name": "my_test_code" + }, + "task_key": "TestTask" + } + ] +} + +=== Expecting 1 wheel to be uploaded +>>> jq .path +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl" +"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl" diff --git a/acceptance/bundle/artifacts/whl_via_environment_key/script b/acceptance/bundle/artifacts/whl_via_environment_key/script new file mode 100644 index 000000000..1914aeb8c --- /dev/null +++ b/acceptance/bundle/artifacts/whl_via_environment_key/script @@ -0,0 +1,11 @@ +trace $CLI bundle deploy + +trace find.py --expect 1 whl + +title "Expecting 1 wheel in environments section in /jobs/create" +trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt + +title "Expecting 1 wheel to be uploaded" +trace jq .path < out.requests.txt | grep import | grep whl | sort + +rm out.requests.txt diff --git a/acceptance/bundle/debug/out.stderr.parallel.txt b/acceptance/bundle/debug/out.stderr.parallel.txt deleted file mode 100644 index 13c81c511..000000000 --- a/acceptance/bundle/debug/out.stderr.parallel.txt +++ /dev/null @@ -1,15 +0,0 @@ -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator 
(read-only)=validate:files_to_sync -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions -10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns -10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync -10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true -< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true -< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true -< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true diff --git a/acceptance/bundle/debug/out.stderr.txt b/acceptance/bundle/debug/out.stderr.txt index 147b33a45..b9ac5c4d9 100644 --- a/acceptance/bundle/debug/out.stderr.txt +++ b/acceptance/bundle/debug/out.stderr.txt @@ -72,18 +72,30 @@ 10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize 10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit 10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit -10:07:59 Debug: Apply pid=12345 mutator=validate +10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) +10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:files_to_sync +10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:folder_permissions +10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:validate_sync_patterns +10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_cluster_key_defined +10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_task_cluster_spec +10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:SingleNodeCluster +10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:artifact_paths 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files < HTTP/1.1 404 Not Found < { < "message": "Workspace path not found" +< } pid=12345 mutator=validate:files_to_sync sdk=true +10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate:files_to_sync sdk=true 10:07:59 Debug: POST /api/2.0/workspace/mkdirs > { > "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files" > } +< HTTP/1.1 200 OK pid=12345 mutator=validate:files_to_sync sdk=true 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files < HTTP/1.1 200 OK < { < "object_type": "DIRECTORY", < "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files" +< } pid=12345 mutator=validate:files_to_sync sdk=true +10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate:files_to_sync 10:07:59 Info: completed execution pid=12345 exit_code=0 diff --git a/acceptance/bundle/debug/script b/acceptance/bundle/debug/script index 005a1a341..913f07e41 100644 --- a/acceptance/bundle/debug/script +++ b/acceptance/bundle/debug/script @@ -1,4 +1 @@ 
-$CLI bundle validate --debug 2> full.stderr.txt -grep -vw parallel full.stderr.txt > out.stderr.txt -grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt -rm full.stderr.txt +$CLI bundle validate --debug 2> out.stderr.txt diff --git a/acceptance/bundle/includes/include_outside_root/output.txt b/acceptance/bundle/includes/include_outside_root/output.txt index 720c3a826..f4f49f3ac 100644 --- a/acceptance/bundle/includes/include_outside_root/output.txt +++ b/acceptance/bundle/includes/include_outside_root/output.txt @@ -2,10 +2,8 @@ Warning: Include section is defined outside root file at include in a.yml:2:3 -The include section is defined in a file that is not the root file. -These values will be ignored because only the includes defined in -the bundle root file (that is databricks.yml or databricks.yaml) -are loaded. +An include section is defined in a file that is not databricks.yml. +Only includes defined in databricks.yml are applied. Name: include_outside_root Target: default diff --git a/acceptance/config_test.go b/acceptance/config_test.go index 4edfee69d..cc5257c65 100644 --- a/acceptance/config_test.go +++ b/acceptance/config_test.go @@ -11,6 +11,7 @@ import ( "github.com/BurntSushi/toml" "github.com/databricks/cli/libs/testdiff" "github.com/databricks/cli/libs/testserver" + ignore "github.com/sabhiram/go-gitignore" "github.com/stretchr/testify/require" ) @@ -51,6 +52,11 @@ type TestConfig struct { // List of request headers to include when recording requests. IncludeRequestHeaders []string + + // List of gitignore patterns to ignore when checking output files + Ignore []string + + CompiledIgnoreObject *ignore.GitIgnore } type ServerStub struct { @@ -111,6 +117,8 @@ func LoadConfig(t *testing.T, dir string) (TestConfig, string) { } } + result.CompiledIgnoreObject = ignore.CompileIgnoreLines(result.Ignore...) 
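
(Editorial note, not part of the patch: a minimal standalone sketch of the gitignore-based `Ignore` mechanism wired up by the `CompileIgnoreLines` call above. Patterns from a `test.toml` are compiled once and later matched against slash-separated relative paths, mirroring the `config.CompiledIgnoreObject.MatchesPath(relPath)` check added to `acceptance_test.go`. The pattern and paths are taken from the `selftest/basic` example in this diff; the printed results depend on gitignore matching semantics.)

```go
package main

import (
	"fmt"

	ignore "github.com/sabhiram/go-gitignore"
)

func main() {
	// Same call shape as in LoadConfig: gitignore-style lines compiled once.
	ign := ignore.CompileIgnoreLines("ignore*", ".databricks")

	// Paths are matched relative to the test directory, with forward slashes,
	// as acceptance_test.go does for each unexpected output file.
	for _, p := range []string{"ignored_file.txt", "ignored_dir/hello.txt", "output.txt"} {
		fmt.Printf("%s ignored=%v\n", p, ign.MatchesPath(p))
	}
}
```
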
+ return result, strings.Join(configs, ", ") } diff --git a/acceptance/selftest/basic/script b/acceptance/selftest/basic/script index bccf30e71..a3ec98402 100644 --- a/acceptance/selftest/basic/script +++ b/acceptance/selftest/basic/script @@ -27,3 +27,7 @@ echo 123456 printf "\n=== Testing --version" trace $CLI --version + +touch ignored_file.txt +mkdir ignored_dir +touch ignored_dir/hello.txt diff --git a/acceptance/selftest/basic/test.toml b/acceptance/selftest/basic/test.toml index 762e28ceb..3ca3d9255 100644 --- a/acceptance/selftest/basic/test.toml +++ b/acceptance/selftest/basic/test.toml @@ -1,5 +1,8 @@ # Badness = "Brief description of what's wrong with the test output, if anything" +Ignore = ['ignore*'] + + #[GOOS] # Disable on Windows #windows = false diff --git a/bundle/apps/validate.go b/bundle/apps/validate.go index fc50aeafc..7a4fe7126 100644 --- a/bundle/apps/validate.go +++ b/bundle/apps/validate.go @@ -3,8 +3,6 @@ package apps import ( "context" "fmt" - "path" - "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" @@ -14,7 +12,6 @@ type validate struct{} func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { var diags diag.Diagnostics - possibleConfigFiles := []string{"app.yml", "app.yaml"} usedSourceCodePaths := make(map[string]string) for key, app := range b.Config.Resources.Apps { @@ -28,16 +25,12 @@ func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics } usedSourceCodePaths[app.SourceCodePath] = key - for _, configFile := range possibleConfigFiles { - appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath) - cf := path.Join(appPath, configFile) - if _, err := b.SyncRoot.Stat(cf); err == nil { - diags = append(diags, diag.Diagnostic{ - Severity: diag.Error, - Summary: configFile + " detected", - Detail: fmt.Sprintf("remove %s and use 'config' property for app resource '%s' instead", cf, app.Name), - }) - } + if app.Config != nil { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Warning, + Summary: "App config section detected", + Detail: fmt.Sprintf("remove 'config' from app resource '%s' section and use app.yml file in the root of this app instead", key), + }) } } diff --git a/bundle/apps/validate_test.go b/bundle/apps/validate_test.go index 11270198e..d218f96ca 100644 --- a/bundle/apps/validate_test.go +++ b/bundle/apps/validate_test.go @@ -17,46 +17,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestAppsValidate(t *testing.T) { - tmpDir := t.TempDir() - testutil.Touch(t, tmpDir, "app1", "app.yml") - testutil.Touch(t, tmpDir, "app2", "app.py") - - b := &bundle.Bundle{ - BundleRootPath: tmpDir, - SyncRootPath: tmpDir, - SyncRoot: vfs.MustNew(tmpDir), - Config: config.Root{ - Workspace: config.Workspace{ - FilePath: "/foo/bar/", - }, - Resources: config.Resources{ - Apps: map[string]*resources.App{ - "app1": { - App: &apps.App{ - Name: "app1", - }, - SourceCodePath: "./app1", - }, - "app2": { - App: &apps.App{ - Name: "app2", - }, - SourceCodePath: "./app2", - }, - }, - }, - }, - } - - bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) - - diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate()) - require.Len(t, diags, 1) - require.Equal(t, "app.yml detected", diags[0].Summary) - require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource") -} - func TestAppsValidateSameSourcePath(t *testing.T) { tmpDir := t.TempDir() testutil.Touch(t, 
tmpDir, "app1", "app.py") diff --git a/bundle/bundle_read_only.go b/bundle/bundle_read_only.go deleted file mode 100644 index 4bdd94e59..000000000 --- a/bundle/bundle_read_only.go +++ /dev/null @@ -1,49 +0,0 @@ -package bundle - -import ( - "context" - - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/libs/vfs" - "github.com/databricks/databricks-sdk-go" -) - -type ReadOnlyBundle struct { - b *Bundle -} - -func ReadOnly(b *Bundle) ReadOnlyBundle { - return ReadOnlyBundle{b: b} -} - -func (r ReadOnlyBundle) Config() config.Root { - return r.b.Config -} - -func (r ReadOnlyBundle) RootPath() string { - return r.b.BundleRootPath -} - -func (r ReadOnlyBundle) BundleRoot() vfs.Path { - return r.b.BundleRoot -} - -func (r ReadOnlyBundle) SyncRoot() vfs.Path { - return r.b.SyncRoot -} - -func (r ReadOnlyBundle) WorktreeRoot() vfs.Path { - return r.b.WorktreeRoot -} - -func (r ReadOnlyBundle) WorkspaceClient() *databricks.WorkspaceClient { - return r.b.WorkspaceClient() -} - -func (r ReadOnlyBundle) CacheDir(ctx context.Context, paths ...string) (string, error) { - return r.b.CacheDir(ctx, paths...) -} - -func (r ReadOnlyBundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) { - return r.b.GetSyncIncludePatterns(ctx) -} diff --git a/bundle/config/loader/process_include.go b/bundle/config/loader/process_include.go index 4f7dd47ca..9458ff09e 100644 --- a/bundle/config/loader/process_include.go +++ b/bundle/config/loader/process_include.go @@ -165,10 +165,8 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: "Include section is defined outside root file", - Detail: `The include section is defined in a file that is not the root file. -These values will be ignored because only the includes defined in -the bundle root file (that is databricks.yml or databricks.yaml) -are loaded.`, + Detail: `An include section is defined in a file that is not databricks.yml. +Only includes defined in databricks.yml are applied.`, Locations: this.GetLocations("include"), Paths: []dyn.Path{dyn.MustPathFromString("include")}, }) diff --git a/bundle/config/validate/fast_validate.go b/bundle/config/validate/fast_validate.go index 47d83036d..e116034ad 100644 --- a/bundle/config/validate/fast_validate.go +++ b/bundle/config/validate/fast_validate.go @@ -14,18 +14,18 @@ import ( // 2. The validation is blocking for bundle deployments. // // The full suite of validation mutators is available in the [Validate] mutator. -type fastValidateReadonly struct{} +type fastValidate struct{ bundle.RO } -func FastValidateReadonly() bundle.ReadOnlyMutator { - return &fastValidateReadonly{} +func FastValidate() bundle.ReadOnlyMutator { + return &fastValidate{} } -func (f *fastValidateReadonly) Name() string { +func (f *fastValidate) Name() string { return "fast_validate(readonly)" } -func (f *fastValidateReadonly) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { - return bundle.ApplyReadOnly(ctx, rb, bundle.Parallel( +func (f *fastValidate) Apply(ctx context.Context, rb *bundle.Bundle) diag.Diagnostics { + return bundle.ApplyParallel(ctx, rb, // Fast mutators with only in-memory checks JobClusterKeyDefined(), JobTaskClusterSpec(), @@ -33,19 +33,5 @@ func (f *fastValidateReadonly) Apply(ctx context.Context, rb bundle.ReadOnlyBund // Blocking mutators. Deployments will fail if these checks fail. 
ValidateArtifactPath(), - )) -} - -type fastValidate struct{} - -func FastValidate() bundle.Mutator { - return &fastValidate{} -} - -func (f *fastValidate) Name() string { - return "fast_validate" -} - -func (f *fastValidate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), FastValidateReadonly()) + ) } diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index b4de06773..aea78f710 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -13,20 +13,20 @@ func FilesToSync() bundle.ReadOnlyMutator { return &filesToSync{} } -type filesToSync struct{} +type filesToSync struct{ bundle.RO } func (v *filesToSync) Name() string { return "validate:files_to_sync" } -func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (v *filesToSync) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // The user may be intentional about not synchronizing any files. // In this case, we should not show any warnings. - if len(rb.Config().Sync.Paths) == 0 { + if len(b.Config.Sync.Paths) == 0 { return nil } - sync, err := files.GetSync(ctx, rb) + sync, err := files.GetSync(ctx, b) if err != nil { return diag.FromErr(err) } @@ -42,20 +42,20 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag. } diags := diag.Diagnostics{} - if len(rb.Config().Sync.Exclude) == 0 { + if len(b.Config.Sync.Exclude) == 0 { diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore", }) } else { - loc := location{path: "sync.exclude", rb: rb} + path := "sync.exclude" diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", // Show all locations where sync.exclude is defined, since merging // sync.exclude is additive. 
- Locations: loc.Locations(), - Paths: []dyn.Path{loc.Path()}, + Locations: b.Config.GetLocations(path), + Paths: []dyn.Path{dyn.MustPathFromString(path)}, }) } diff --git a/bundle/config/validate/files_to_sync_test.go b/bundle/config/validate/files_to_sync_test.go index dd40295c3..0a5f69727 100644 --- a/bundle/config/validate/files_to_sync_test.go +++ b/bundle/config/validate/files_to_sync_test.go @@ -29,8 +29,7 @@ func TestFilesToSync_NoPaths(t *testing.T) { } ctx := context.Background() - rb := bundle.ReadOnly(b) - diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync()) + diags := FilesToSync().Apply(ctx, b) assert.Empty(t, diags) } @@ -85,8 +84,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) { testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n") ctx := context.Background() - rb := bundle.ReadOnly(b) - diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync()) + diags := FilesToSync().Apply(ctx, b) require.Len(t, diags, 1) assert.Equal(t, diag.Warning, diags[0].Severity) assert.Equal(t, "There are no files to sync, please check your .gitignore", diags[0].Summary) @@ -99,8 +97,7 @@ func TestFilesToSync_EverythingExcluded(t *testing.T) { b.Config.Sync.Exclude = []string{"*"} ctx := context.Background() - rb := bundle.ReadOnly(b) - diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync()) + diags := FilesToSync().Apply(ctx, b) require.Len(t, diags, 1) assert.Equal(t, diag.Warning, diags[0].Severity) assert.Equal(t, "There are no files to sync, please check your .gitignore and sync.exclude configuration", diags[0].Summary) diff --git a/bundle/config/validate/folder_permissions.go b/bundle/config/validate/folder_permissions.go index 7b12b4b16..575702c34 100644 --- a/bundle/config/validate/folder_permissions.go +++ b/bundle/config/validate/folder_permissions.go @@ -16,15 +16,14 @@ import ( "golang.org/x/sync/errgroup" ) -type folderPermissions struct{} +type folderPermissions struct{ bundle.RO } -// Apply implements bundle.ReadOnlyMutator. -func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) diag.Diagnostics { - if len(b.Config().Permissions) == 0 { +func (f *folderPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + if len(b.Config.Permissions) == 0 { return nil } - bundlePaths := paths.CollectUniqueWorkspacePathPrefixes(b.Config().Workspace) + bundlePaths := paths.CollectUniqueWorkspacePathPrefixes(b.Config.Workspace) var diags diag.Diagnostics g, ctx := errgroup.WithContext(ctx) @@ -48,7 +47,7 @@ func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) return diags } -func checkFolderPermission(ctx context.Context, b bundle.ReadOnlyBundle, folderPath string) diag.Diagnostics { +func checkFolderPermission(ctx context.Context, b *bundle.Bundle, folderPath string) diag.Diagnostics { // If the folder is shared, then we don't need to check permissions as it was already checked in the other mutator before. 
if libraries.IsWorkspaceSharedPath(folderPath) { return nil @@ -69,7 +68,7 @@ func checkFolderPermission(ctx context.Context, b bundle.ReadOnlyBundle, folderP } p := permissions.ObjectAclToResourcePermissions(folderPath, objPermissions.AccessControlList) - return p.Compare(b.Config().Permissions) + return p.Compare(b.Config.Permissions) } func getClosestExistingObject(ctx context.Context, w workspace.WorkspaceInterface, folderPath string) (*workspace.ObjectInfo, error) { diff --git a/bundle/config/validate/folder_permissions_test.go b/bundle/config/validate/folder_permissions_test.go index 8e68c9fbf..40906ce6f 100644 --- a/bundle/config/validate/folder_permissions_test.go +++ b/bundle/config/validate/folder_permissions_test.go @@ -69,9 +69,7 @@ func TestFolderPermissionsInheritedWhenRootPathDoesNotExist(t *testing.T) { }, nil) b.SetWorkpaceClient(m.WorkspaceClient) - rb := bundle.ReadOnly(b) - - diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions()) + diags := ValidateFolderPermissions().Apply(context.Background(), b) require.Empty(t, diags) } @@ -118,9 +116,7 @@ func TestValidateFolderPermissionsFailsOnMissingBundlePermission(t *testing.T) { }, nil) b.SetWorkpaceClient(m.WorkspaceClient) - rb := bundle.ReadOnly(b) - - diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions()) + diags := ValidateFolderPermissions().Apply(context.Background(), b) require.Len(t, diags, 1) require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary) require.Equal(t, diag.Warning, diags[0].Severity) @@ -164,9 +160,7 @@ func TestValidateFolderPermissionsFailsOnPermissionMismatch(t *testing.T) { }, nil) b.SetWorkpaceClient(m.WorkspaceClient) - rb := bundle.ReadOnly(b) - - diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions()) + diags := ValidateFolderPermissions().Apply(context.Background(), b) require.Len(t, diags, 1) require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary) require.Equal(t, diag.Warning, diags[0].Severity) @@ -199,9 +193,7 @@ func TestValidateFolderPermissionsFailsOnNoRootFolder(t *testing.T) { }) b.SetWorkpaceClient(m.WorkspaceClient) - rb := bundle.ReadOnly(b) - - diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions()) + diags := ValidateFolderPermissions().Apply(context.Background(), b) require.Len(t, diags, 1) require.Equal(t, "folder / and its parent folders do not exist", diags[0].Summary) require.Equal(t, diag.Error, diags[0].Severity) diff --git a/bundle/config/validate/interpolation_in_auth_config.go b/bundle/config/validate/interpolation_in_auth_config.go index 83d6c6ae1..1a5b64a26 100644 --- a/bundle/config/validate/interpolation_in_auth_config.go +++ b/bundle/config/validate/interpolation_in_auth_config.go @@ -24,7 +24,10 @@ func (f *noInterpolationInAuthConfig) Name() string { func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { authFields := []string{ // Generic attributes. - "host", "profile", "auth_type", "metadata_service_url", + "host", + "profile", + "auth_type", + "metadata_service_url", // OAuth specific attributes. "client_id", @@ -33,8 +36,12 @@ func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundl "google_service_account", // Azure specific attributes. 
- "azure_resource_id", "azure_use_msi", "azure_client_id", "azure_tenant_id", - "azure_environment", "azure_login_app_id", + "azure_resource_id", + "azure_use_msi", + "azure_client_id", + "azure_tenant_id", + "azure_environment", + "azure_login_app_id", } diags := diag.Diagnostics{} @@ -49,12 +56,11 @@ func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundl return diag.FromErr(err) } - if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil { + vv, ok := v.AsString() + if !ok { continue } - vv := v.MustString() - // Check if the field contains interpolation. if dynvar.ContainsVariableReference(vv) { envVar, ok := auth.GetEnvFor(fieldName) diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index c3a1ab3df..5ae2f5437 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -13,16 +13,16 @@ func JobClusterKeyDefined() bundle.ReadOnlyMutator { return &jobClusterKeyDefined{} } -type jobClusterKeyDefined struct{} +type jobClusterKeyDefined struct{ bundle.RO } func (v *jobClusterKeyDefined) Name() string { return "validate:job_cluster_key_defined" } -func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (v *jobClusterKeyDefined) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags := diag.Diagnostics{} - for k, job := range rb.Config().Resources.Jobs { + for k, job := range b.Config.Resources.Jobs { jobClusterKeys := make(map[string]bool) for _, cluster := range job.JobClusters { if cluster.JobClusterKey != "" { @@ -33,10 +33,7 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund for index, task := range job.Tasks { if task.JobClusterKey != "" { if _, ok := jobClusterKeys[task.JobClusterKey]; !ok { - loc := location{ - path: fmt.Sprintf("resources.jobs.%s.tasks[%d].job_cluster_key", k, index), - rb: rb, - } + path := fmt.Sprintf("resources.jobs.%s.tasks[%d].job_cluster_key", k, index) diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, @@ -44,8 +41,8 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund // Show only the location where the job_cluster_key is defined. // Other associated locations are not relevant since they are // overridden during merging. 
- Locations: []dyn.Location{loc.Location()}, - Paths: []dyn.Path{loc.Path()}, + Locations: b.Config.GetLocations(path), + Paths: []dyn.Path{dyn.MustPathFromString(path)}, }) } } diff --git a/bundle/config/validate/job_cluster_key_defined_test.go b/bundle/config/validate/job_cluster_key_defined_test.go index 2cbdb7c6a..559bd1c46 100644 --- a/bundle/config/validate/job_cluster_key_defined_test.go +++ b/bundle/config/validate/job_cluster_key_defined_test.go @@ -33,7 +33,7 @@ func TestJobClusterKeyDefined(t *testing.T) { }, } - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) + diags := JobClusterKeyDefined().Apply(context.Background(), b) require.Empty(t, diags) require.NoError(t, diags.Error()) } @@ -56,7 +56,7 @@ func TestJobClusterKeyNotDefined(t *testing.T) { }, } - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) + diags := JobClusterKeyDefined().Apply(context.Background(), b) require.Len(t, diags, 1) require.NoError(t, diags.Error()) require.Equal(t, diag.Warning, diags[0].Severity) @@ -89,7 +89,7 @@ func TestJobClusterKeyDefinedInDifferentJob(t *testing.T) { }, } - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) + diags := JobClusterKeyDefined().Apply(context.Background(), b) require.Len(t, diags, 1) require.NoError(t, diags.Error()) require.Equal(t, diag.Warning, diags[0].Severity) diff --git a/bundle/config/validate/job_task_cluster_spec.go b/bundle/config/validate/job_task_cluster_spec.go index 5f532acfe..79672be63 100644 --- a/bundle/config/validate/job_task_cluster_spec.go +++ b/bundle/config/validate/job_task_cluster_spec.go @@ -17,31 +17,31 @@ func JobTaskClusterSpec() bundle.ReadOnlyMutator { return &jobTaskClusterSpec{} } -type jobTaskClusterSpec struct{} +type jobTaskClusterSpec struct{ bundle.RO } func (v *jobTaskClusterSpec) Name() string { return "validate:job_task_cluster_spec" } -func (v *jobTaskClusterSpec) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (v *jobTaskClusterSpec) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags := diag.Diagnostics{} jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")) - for resourceName, job := range rb.Config().Resources.Jobs { + for resourceName, job := range b.Config.Resources.Jobs { resourcePath := jobsPath.Append(dyn.Key(resourceName)) for taskIndex, task := range job.Tasks { taskPath := resourcePath.Append(dyn.Key("tasks"), dyn.Index(taskIndex)) - diags = diags.Extend(validateJobTask(rb, task, taskPath)) + diags = diags.Extend(validateJobTask(b, task, taskPath)) } } return diags } -func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path) diag.Diagnostics { +func validateJobTask(b *bundle.Bundle, task jobs.Task, taskPath dyn.Path) diag.Diagnostics { diags := diag.Diagnostics{} var specified []string @@ -74,7 +74,7 @@ func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path if task.ForEachTask != nil { forEachTaskPath := taskPath.Append(dyn.Key("for_each_task"), dyn.Key("task")) - diags = diags.Extend(validateJobTask(rb, task.ForEachTask.Task, forEachTaskPath)) + diags = diags.Extend(validateJobTask(b, task.ForEachTask.Task, forEachTaskPath)) } if isComputeTask(task) && len(specified) == 0 { @@ -92,7 +92,7 @@ func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path Severity: diag.Error, Summary: "Missing required cluster or environment settings", Detail: detail, - 
Locations: rb.Config().GetLocations(taskPath.String()), + Locations: b.Config.GetLocations(taskPath.String()), Paths: []dyn.Path{taskPath}, }) } diff --git a/bundle/config/validate/job_task_cluster_spec_test.go b/bundle/config/validate/job_task_cluster_spec_test.go index a3a7ccf25..fd316d61f 100644 --- a/bundle/config/validate/job_task_cluster_spec_test.go +++ b/bundle/config/validate/job_task_cluster_spec_test.go @@ -174,7 +174,7 @@ Specify one of the following fields: job_cluster_key, environment_key, existing_ } b := createBundle(map[string]*resources.Job{"job1": job}) - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobTaskClusterSpec()) + diags := JobTaskClusterSpec().Apply(context.Background(), b) if tc.errorPath != "" || tc.errorDetail != "" || tc.errorSummary != "" { assert.Len(t, diags, 1) diff --git a/bundle/config/validate/single_node_cluster.go b/bundle/config/validate/single_node_cluster.go index 7c159f61a..acc5f444d 100644 --- a/bundle/config/validate/single_node_cluster.go +++ b/bundle/config/validate/single_node_cluster.go @@ -16,7 +16,7 @@ func SingleNodeCluster() bundle.ReadOnlyMutator { return &singleNodeCluster{} } -type singleNodeCluster struct{} +type singleNodeCluster struct{ bundle.RO } func (m *singleNodeCluster) Name() string { return "validate:SingleNodeCluster" @@ -98,7 +98,7 @@ func showSingleNodeClusterWarning(ctx context.Context, v dyn.Value) bool { return false } -func (m *singleNodeCluster) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (m *singleNodeCluster) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags := diag.Diagnostics{} patterns := []dyn.Pattern{ @@ -115,7 +115,7 @@ func (m *singleNodeCluster) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) } for _, p := range patterns { - _, err := dyn.MapByPattern(rb.Config().Value(), p, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + _, err := dyn.MapByPattern(b.Config.Value(), p, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { warning := diag.Diagnostic{ Severity: diag.Warning, Summary: singleNodeWarningSummary, diff --git a/bundle/config/validate/single_node_cluster_test.go b/bundle/config/validate/single_node_cluster_test.go index be93420c6..c6d5f8ca9 100644 --- a/bundle/config/validate/single_node_cluster_test.go +++ b/bundle/config/validate/single_node_cluster_test.go @@ -116,7 +116,7 @@ func TestValidateSingleNodeClusterFailForInteractiveClusters(t *testing.T) { bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) { return dyn.Set(v, "resources.clusters.foo.num_workers", dyn.V(0)) }) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := SingleNodeCluster().Apply(ctx, b) assert.Equal(t, diag.Diagnostics{ { Severity: diag.Warning, @@ -165,7 +165,7 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) { return dyn.Set(v, "resources.jobs.foo.job_clusters[0].new_cluster.num_workers", dyn.V(0)) }) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := SingleNodeCluster().Apply(ctx, b) assert.Equal(t, diag.Diagnostics{ { Severity: diag.Warning, @@ -214,7 +214,7 @@ func TestValidateSingleNodeClusterFailForJobTaskClusters(t *testing.T) { return dyn.Set(v, "resources.jobs.foo.tasks[0].new_cluster.num_workers", dyn.V(0)) }) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Equal(t, diag.Diagnostics{ { Severity: diag.Warning, @@ -260,7 +260,7 @@ func 
TestValidateSingleNodeClusterFailForPipelineClusters(t *testing.T) { return dyn.Set(v, "resources.pipelines.foo.clusters[0].num_workers", dyn.V(0)) }) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Equal(t, diag.Diagnostics{ { Severity: diag.Warning, @@ -313,7 +313,7 @@ func TestValidateSingleNodeClusterFailForJobForEachTaskCluster(t *testing.T) { return dyn.Set(v, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster.num_workers", dyn.V(0)) }) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Equal(t, diag.Diagnostics{ { Severity: diag.Warning, @@ -397,7 +397,7 @@ func TestValidateSingleNodeClusterPassInteractiveClusters(t *testing.T) { }) } - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Empty(t, diags) }) } @@ -437,7 +437,7 @@ func TestValidateSingleNodeClusterPassJobClusters(t *testing.T) { }) } - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Empty(t, diags) }) } @@ -477,7 +477,7 @@ func TestValidateSingleNodeClusterPassJobTaskClusters(t *testing.T) { }) } - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Empty(t, diags) }) } @@ -514,7 +514,7 @@ func TestValidateSingleNodeClusterPassPipelineClusters(t *testing.T) { }) } - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Empty(t, diags) }) } @@ -558,7 +558,7 @@ func TestValidateSingleNodeClusterPassJobForEachTaskCluster(t *testing.T) { }) } - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) + diags := bundle.Apply(ctx, b, SingleNodeCluster()) assert.Empty(t, diags) }) } diff --git a/bundle/config/validate/validate.go b/bundle/config/validate/validate.go index 8fdd704ab..83d17337d 100644 --- a/bundle/config/validate/validate.go +++ b/bundle/config/validate/validate.go @@ -5,46 +5,16 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" - "github.com/databricks/cli/libs/dyn" ) -type validate struct{} - -type location struct { - path string - rb bundle.ReadOnlyBundle -} - -func (l location) Location() dyn.Location { - return l.rb.Config().GetLocation(l.path) -} - -func (l location) Locations() []dyn.Location { - return l.rb.Config().GetLocations(l.path) -} - -func (l location) Path() dyn.Path { - return dyn.MustPathFromString(l.path) -} - -// Apply implements bundle.Mutator. -func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), bundle.Parallel( - FastValidateReadonly(), +func Validate(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + return bundle.ApplyParallel(ctx, b, + FastValidate(), // Slow mutators that require network or file i/o. These are only // run in the `bundle validate` command. FilesToSync(), ValidateFolderPermissions(), ValidateSyncPatterns(), - )) -} - -// Name implements bundle.Mutator. 
-func (v *validate) Name() string { - return "validate" -} - -func Validate() bundle.Mutator { - return &validate{} + ) } diff --git a/bundle/config/validate/validate_artifact_path.go b/bundle/config/validate/validate_artifact_path.go index aa4492670..78536d4bd 100644 --- a/bundle/config/validate/validate_artifact_path.go +++ b/bundle/config/validate/validate_artifact_path.go @@ -16,7 +16,7 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" ) -type validateArtifactPath struct{} +type validateArtifactPath struct{ bundle.RO } func ValidateArtifactPath() bundle.ReadOnlyMutator { return &validateArtifactPath{} @@ -74,9 +74,9 @@ func findVolumeInBundle(r config.Root, catalogName, schemaName, volumeName strin return nil, nil, false } -func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { +func (v *validateArtifactPath) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // We only validate UC Volumes paths right now. - if !libraries.IsVolumesPath(rb.Config().Workspace.ArtifactPath) { + if !libraries.IsVolumesPath(b.Config.Workspace.ArtifactPath) { return nil } @@ -85,25 +85,25 @@ func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBund { Summary: s, Severity: diag.Error, - Locations: rb.Config().GetLocations("workspace.artifact_path"), + Locations: b.Config.GetLocations("workspace.artifact_path"), Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, }, } } - catalogName, schemaName, volumeName, err := extractVolumeFromPath(rb.Config().Workspace.ArtifactPath) + catalogName, schemaName, volumeName, err := extractVolumeFromPath(b.Config.Workspace.ArtifactPath) if err != nil { return wrapErrorMsg(err.Error()) } volumeFullName := fmt.Sprintf("%s.%s.%s", catalogName, schemaName, volumeName) - w := rb.WorkspaceClient() + w := b.WorkspaceClient() _, err = w.Volumes.ReadByName(ctx, volumeFullName) if errors.Is(err, apierr.ErrPermissionDenied) { return wrapErrorMsg(fmt.Sprintf("cannot access volume %s: %s", volumeFullName, err)) } if errors.Is(err, apierr.ErrNotFound) { - path, locations, ok := findVolumeInBundle(rb.Config(), catalogName, schemaName, volumeName) + path, locations, ok := findVolumeInBundle(b.Config, catalogName, schemaName, volumeName) if !ok { return wrapErrorMsg(fmt.Sprintf("volume %s does not exist", volumeFullName)) } @@ -117,7 +117,7 @@ func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBund this bundle but which has not been deployed yet. 
Please first deploy the volume using 'bundle deploy' and then switch over to using it in the artifact_path.`, - Locations: slices.Concat(rb.Config().GetLocations("workspace.artifact_path"), locations), + Locations: slices.Concat(b.Config.GetLocations("workspace.artifact_path"), locations), Paths: append([]dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, path), }} diff --git a/bundle/config/validate/validate_artifact_path_test.go b/bundle/config/validate/validate_artifact_path_test.go index e1ae6af34..3f998567b 100644 --- a/bundle/config/validate/validate_artifact_path_test.go +++ b/bundle/config/validate/validate_artifact_path_test.go @@ -49,7 +49,7 @@ func TestValidateArtifactPathWithVolumeInBundle(t *testing.T) { }) b.SetWorkpaceClient(m.WorkspaceClient) - diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), ValidateArtifactPath()) + diags := ValidateArtifactPath().Apply(ctx, b) assert.Equal(t, diag.Diagnostics{{ Severity: diag.Error, Summary: "volume catalogN.schemaN.volumeN does not exist", @@ -88,7 +88,6 @@ func TestValidateArtifactPath(t *testing.T) { }}, diags) } - rb := bundle.ReadOnly(b) ctx := context.Background() tcases := []struct { @@ -123,7 +122,7 @@ func TestValidateArtifactPath(t *testing.T) { api.EXPECT().ReadByName(mock.Anything, "catalogN.schemaN.volumeN").Return(nil, tc.err) b.SetWorkpaceClient(m.WorkspaceClient) - diags := bundle.ApplyReadOnly(ctx, rb, ValidateArtifactPath()) + diags := ValidateArtifactPath().Apply(ctx, b) assertDiags(t, diags, tc.expectedSummary) } } @@ -167,7 +166,7 @@ func TestValidateArtifactPathWithInvalidPaths(t *testing.T) { bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}) - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), ValidateArtifactPath()) + diags := ValidateArtifactPath().Apply(context.Background(), b) require.Equal(t, diag.Diagnostics{{ Severity: diag.Error, Summary: "expected UC volume path to be in the format /Volumes////..., got " + p, diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index 04acd28ab..04df36e4f 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -17,24 +17,24 @@ func ValidateSyncPatterns() bundle.ReadOnlyMutator { return &validateSyncPatterns{} } -type validateSyncPatterns struct{} +type validateSyncPatterns struct{ bundle.RO } func (v *validateSyncPatterns) Name() string { return "validate:validate_sync_patterns" } -func (v *validateSyncPatterns) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { - s := rb.Config().Sync +func (v *validateSyncPatterns) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + s := b.Config.Sync if len(s.Exclude) == 0 && len(s.Include) == 0 { return nil } - diags, err := checkPatterns(s.Exclude, "sync.exclude", rb) + diags, err := checkPatterns(s.Exclude, "sync.exclude", b) if err != nil { return diag.FromErr(err) } - includeDiags, err := checkPatterns(s.Include, "sync.include", rb) + includeDiags, err := checkPatterns(s.Include, "sync.include", b) if err != nil { return diag.FromErr(err) } @@ -42,7 +42,7 @@ func (v *validateSyncPatterns) Apply(ctx context.Context, rb bundle.ReadOnlyBund return diags.Extend(includeDiags) } -func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (diag.Diagnostics, error) { +func checkPatterns(patterns []string, path string, b *bundle.Bundle) (diag.Diagnostics, error) { var mu 
sync.Mutex var errs errgroup.Group var diags diag.Diagnostics @@ -55,7 +55,7 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di // it means: do not include these files into result set p := strings.TrimPrefix(pattern, "!") errs.Go(func() error { - fs, err := fileset.NewGlobSet(rb.BundleRoot(), []string{p}) + fs, err := fileset.NewGlobSet(b.BundleRoot, []string{p}) if err != nil { return err } @@ -66,13 +66,13 @@ func checkPatterns(patterns []string, path string, rb bundle.ReadOnlyBundle) (di } if len(all) == 0 { - loc := location{path: fmt.Sprintf("%s[%d]", path, index), rb: rb} + path := fmt.Sprintf("%s[%d]", path, index) mu.Lock() diags = diags.Append(diag.Diagnostic{ Severity: diag.Warning, Summary: fmt.Sprintf("Pattern %s does not match any files", pattern), - Locations: []dyn.Location{loc.Location()}, - Paths: []dyn.Path{loc.Path()}, + Locations: b.Config.GetLocations(path), + Paths: []dyn.Path{dyn.MustPathFromString(path)}, }) mu.Unlock() } diff --git a/bundle/deploy/files/delete.go b/bundle/deploy/files/delete.go index bb28c2722..971186d5b 100644 --- a/bundle/deploy/files/delete.go +++ b/bundle/deploy/files/delete.go @@ -40,7 +40,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { } func deleteSnapshotFile(ctx context.Context, b *bundle.Bundle) error { - opts, err := GetSyncOptions(ctx, bundle.ReadOnly(b)) + opts, err := GetSyncOptions(ctx, b) if err != nil { return fmt.Errorf("cannot get sync options: %w", err) } diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index e3abc5fef..a3ead13a4 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -8,43 +8,43 @@ import ( "github.com/databricks/cli/libs/sync" ) -func GetSync(ctx context.Context, rb bundle.ReadOnlyBundle) (*sync.Sync, error) { - opts, err := GetSyncOptions(ctx, rb) +func GetSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { + opts, err := GetSyncOptions(ctx, b) if err != nil { return nil, fmt.Errorf("cannot get sync options: %w", err) } return sync.New(ctx, *opts) } -func GetSyncOptions(ctx context.Context, rb bundle.ReadOnlyBundle) (*sync.SyncOptions, error) { - cacheDir, err := rb.CacheDir(ctx) +func GetSyncOptions(ctx context.Context, b *bundle.Bundle) (*sync.SyncOptions, error) { + cacheDir, err := b.CacheDir(ctx) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } - includes, err := rb.GetSyncIncludePatterns(ctx) + includes, err := b.GetSyncIncludePatterns(ctx) if err != nil { return nil, fmt.Errorf("cannot get list of sync includes: %w", err) } opts := &sync.SyncOptions{ - WorktreeRoot: rb.WorktreeRoot(), - LocalRoot: rb.SyncRoot(), - Paths: rb.Config().Sync.Paths, + WorktreeRoot: b.WorktreeRoot, + LocalRoot: b.SyncRoot, + Paths: b.Config.Sync.Paths, Include: includes, - Exclude: rb.Config().Sync.Exclude, + Exclude: b.Config.Sync.Exclude, - RemotePath: rb.Config().Workspace.FilePath, - Host: rb.WorkspaceClient().Config.Host, + RemotePath: b.Config.Workspace.FilePath, + Host: b.WorkspaceClient().Config.Host, Full: false, SnapshotBasePath: cacheDir, - WorkspaceClient: rb.WorkspaceClient(), + WorkspaceClient: b.WorkspaceClient(), } - if rb.Config().Workspace.CurrentUser != nil { - opts.CurrentUser = rb.Config().Workspace.CurrentUser.User + if b.Config.Workspace.CurrentUser != nil { + opts.CurrentUser = b.Config.Workspace.CurrentUser.User } return opts, nil diff --git a/bundle/deploy/files/upload.go b/bundle/deploy/files/upload.go index 452850dc4..bb46c97c9 
100644 --- a/bundle/deploy/files/upload.go +++ b/bundle/deploy/files/upload.go @@ -30,7 +30,7 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { } cmdio.LogString(ctx, fmt.Sprintf("Uploading bundle files to %s...", b.Config.Workspace.FilePath)) - opts, err := GetSyncOptions(ctx, bundle.ReadOnly(b)) + opts, err := GetSyncOptions(ctx, b) if err != nil { return diag.FromErr(err) } diff --git a/bundle/deploy/state_pull.go b/bundle/deploy/state_pull.go index 8fffca073..844dcb77e 100644 --- a/bundle/deploy/state_pull.go +++ b/bundle/deploy/state_pull.go @@ -85,7 +85,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic } // Create a new snapshot based on the deployment state file. - opts, err := files.GetSyncOptions(ctx, bundle.ReadOnly(b)) + opts, err := files.GetSyncOptions(ctx, b) if err != nil { return diag.FromErr(err) } diff --git a/bundle/deploy/state_pull_test.go b/bundle/deploy/state_pull_test.go index f38b71f6b..b3d838fce 100644 --- a/bundle/deploy/state_pull_test.go +++ b/bundle/deploy/state_pull_test.go @@ -92,7 +92,7 @@ func testStatePull(t *testing.T, opts statePullOpts) { } if opts.withExistingSnapshot { - opts, err := files.GetSyncOptions(ctx, bundle.ReadOnly(b)) + opts, err := files.GetSyncOptions(ctx, b) require.NoError(t, err) snapshotPath, err := sync.SnapshotPath(opts) @@ -134,7 +134,7 @@ func testStatePull(t *testing.T, opts statePullOpts) { } if opts.expects.snapshotState != nil { - syncOpts, err := files.GetSyncOptions(ctx, bundle.ReadOnly(b)) + syncOpts, err := files.GetSyncOptions(ctx, b) require.NoError(t, err) snapshotPath, err := sync.SnapshotPath(syncOpts) diff --git a/bundle/libraries/same_name_libraries.go b/bundle/libraries/same_name_libraries.go index ab869d3d2..907843193 100644 --- a/bundle/libraries/same_name_libraries.go +++ b/bundle/libraries/same_name_libraries.go @@ -13,8 +13,10 @@ import ( type checkForSameNameLibraries struct{} var patterns = []dyn.Pattern{ - taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()), - forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()), + taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.Key("whl")), + taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.Key("jar")), + forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.Key("whl")), + forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.Key("jar")), envDepsPattern.Append(dyn.AnyIndex()), } diff --git a/bundle/mutator_read_only.go b/bundle/mutator_read_only.go index 700a90d8d..87857022d 100644 --- a/bundle/mutator_read_only.go +++ b/bundle/mutator_read_only.go @@ -2,28 +2,59 @@ package bundle import ( "context" + "sync" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" ) -// ReadOnlyMutator is the interface type that allows access to bundle configuration but does not allow any mutations. type ReadOnlyMutator interface { - // Name returns the mutators name. - Name() string + Mutator - // Apply access the specified read-only bundle object. - Apply(context.Context, ReadOnlyBundle) diag.Diagnostics + // This is just tag, to differentiate this interface from bundle.Mutator + // This prevents non-readonly mutators being passed to ApplyParallel(). 
+ IsRO() } -func ApplyReadOnly(ctx context.Context, rb ReadOnlyBundle, m ReadOnlyMutator) diag.Diagnostics { - ctx = log.NewContext(ctx, log.GetLogger(ctx).With("mutator (read-only)", m.Name())) +// Helper to mark the mutator as "read-only" +type RO struct{} - log.Debugf(ctx, "ApplyReadOnly") - diags := m.Apply(ctx, rb) - if err := diags.Error(); err != nil { - log.Debugf(ctx, "Error: %s", err) +func (*RO) IsRO() {} + +// Run mutators in parallel. Unlike Apply and ApplySeq, this does not perform sync between +// dynamic and static configuration. +// Warning: none of the mutators involved must modify bundle directly or indirectly. In particular, +// they must not call bundle.Apply or bundle.ApplySeq because those include writes to config even if mutator does not. +// Deprecated: do not use for new use cases. Refactor your parallel task not to depend on bundle at all. +func ApplyParallel(ctx context.Context, b *Bundle, mutators ...ReadOnlyMutator) diag.Diagnostics { + var allDiags diag.Diagnostics + resultsChan := make(chan diag.Diagnostics, len(mutators)) + var wg sync.WaitGroup + + contexts := make([]context.Context, len(mutators)) + + for ind, m := range mutators { + contexts[ind] = log.NewContext(ctx, log.GetLogger(ctx).With("mutator", m.Name())) + // log right away to have deterministic order of log messages + log.Debug(contexts[ind], "ApplyParallel") } - return diags + for ind, m := range mutators { + wg.Add(1) + go func() { + defer wg.Done() + // We're not using bundle.Apply here because we don't do copy between typed and dynamic values + resultsChan <- m.Apply(contexts[ind], b) + }() + } + + wg.Wait() + close(resultsChan) + + // Collect results into a single slice + for diags := range resultsChan { + allDiags = append(allDiags, diags...) + } + + return allDiags } diff --git a/bundle/parallel.go b/bundle/parallel.go deleted file mode 100644 index ebb91661a..000000000 --- a/bundle/parallel.go +++ /dev/null @@ -1,43 +0,0 @@ -package bundle - -import ( - "context" - "sync" - - "github.com/databricks/cli/libs/diag" -) - -type parallel struct { - mutators []ReadOnlyMutator -} - -func (m *parallel) Name() string { - return "parallel" -} - -func (m *parallel) Apply(ctx context.Context, rb ReadOnlyBundle) diag.Diagnostics { - var wg sync.WaitGroup - var mu sync.Mutex - var diags diag.Diagnostics - - wg.Add(len(m.mutators)) - for _, mutator := range m.mutators { - go func(mutator ReadOnlyMutator) { - defer wg.Done() - d := ApplyReadOnly(ctx, rb, mutator) - - mu.Lock() - diags = diags.Extend(d) - mu.Unlock() - }(mutator) - } - wg.Wait() - return diags -} - -// Parallel runs the given mutators in parallel. -func Parallel(mutators ...ReadOnlyMutator) ReadOnlyMutator { - return ¶llel{ - mutators: mutators, - } -} diff --git a/bundle/parallel_test.go b/bundle/parallel_test.go deleted file mode 100644 index dfc7ddac9..000000000 --- a/bundle/parallel_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package bundle - -import ( - "context" - "sync" - "testing" - - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/libs/diag" - "github.com/stretchr/testify/require" -) - -type addToContainer struct { - t *testing.T - container *[]int - value int - err bool - - // mu is a mutex that protects container. It is used to ensure that the - // container slice is only modified by one goroutine at a time. 
- mu *sync.Mutex -} - -func (m *addToContainer) Apply(ctx context.Context, b ReadOnlyBundle) diag.Diagnostics { - if m.err { - return diag.Errorf("error") - } - - m.mu.Lock() - *m.container = append(*m.container, m.value) - m.mu.Unlock() - - return nil -} - -func (m *addToContainer) Name() string { - return "addToContainer" -} - -func TestParallelMutatorWork(t *testing.T) { - b := &Bundle{ - Config: config.Root{}, - } - - container := []int{} - var mu sync.Mutex - m1 := &addToContainer{t: t, container: &container, value: 1, mu: &mu} - m2 := &addToContainer{t: t, container: &container, value: 2, mu: &mu} - m3 := &addToContainer{t: t, container: &container, value: 3, mu: &mu} - - m := Parallel(m1, m2, m3) - - // Apply the mutator - diags := ApplyReadOnly(context.Background(), ReadOnly(b), m) - require.Empty(t, diags) - require.Len(t, container, 3) - require.Contains(t, container, 1) - require.Contains(t, container, 2) - require.Contains(t, container, 3) -} - -func TestParallelMutatorWorkWithErrors(t *testing.T) { - b := &Bundle{ - Config: config.Root{}, - } - - container := []int{} - var mu sync.Mutex - m1 := &addToContainer{container: &container, value: 1, mu: &mu} - m2 := &addToContainer{container: &container, err: true, value: 2, mu: &mu} - m3 := &addToContainer{container: &container, value: 3, mu: &mu} - - m := Parallel(m1, m2, m3) - - // Apply the mutator - diags := ApplyReadOnly(context.Background(), ReadOnly(b), m) - require.Len(t, diags, 1) - require.Equal(t, "error", diags[0].Summary) - require.Len(t, container, 2) - require.Contains(t, container, 1) - require.Contains(t, container, 3) -} diff --git a/bundle/tests/enviroment_key_test.go b/bundle/tests/enviroment_key_test.go index 135ef1917..c22059313 100644 --- a/bundle/tests/enviroment_key_test.go +++ b/bundle/tests/enviroment_key_test.go @@ -9,11 +9,6 @@ import ( "github.com/stretchr/testify/require" ) -func TestEnvironmentKeySupported(t *testing.T) { - _, diags := loadTargetWithDiags("./python_wheel/environment_key", "default") - require.Empty(t, diags) -} - func TestEnvironmentKeyProvidedAndNoPanic(t *testing.T) { b, diags := loadTargetWithDiags("./environment_key_only", "default") require.Empty(t, diags) diff --git a/bundle/tests/job_cluster_key_test.go b/bundle/tests/job_cluster_key_test.go index 6a08da89c..0306bf7b5 100644 --- a/bundle/tests/job_cluster_key_test.go +++ b/bundle/tests/job_cluster_key_test.go @@ -13,7 +13,7 @@ import ( func TestJobClusterKeyNotDefinedTest(t *testing.T) { b := loadTarget(t, "./job_cluster_key", "default") - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.JobClusterKeyDefined()) + diags := bundle.Apply(context.Background(), b, validate.JobClusterKeyDefined()) require.Len(t, diags, 1) require.NoError(t, diags.Error()) require.Equal(t, diag.Warning, diags[0].Severity) @@ -23,6 +23,6 @@ func TestJobClusterKeyNotDefinedTest(t *testing.T) { func TestJobClusterKeyDefinedTest(t *testing.T) { b := loadTarget(t, "./job_cluster_key", "development") - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.JobClusterKeyDefined()) + diags := bundle.Apply(context.Background(), b, validate.JobClusterKeyDefined()) require.Empty(t, diags) } diff --git a/bundle/tests/python_wheel_test.go b/bundle/tests/python_wheel_test.go deleted file mode 100644 index 22702ec44..000000000 --- a/bundle/tests/python_wheel_test.go +++ /dev/null @@ -1,144 +0,0 @@ -package config_tests - -import ( - "context" - "path/filepath" - "testing" - - 
"github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/libraries" - "github.com/databricks/cli/bundle/phases" - mockfiler "github.com/databricks/cli/internal/mocks/libs/filer" - "github.com/databricks/cli/libs/filer" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" -) - -func TestPythonWheelBuild(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") - require.NoError(t, err) - require.Len(t, matches, 1) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} - -func TestPythonWheelBuildAutoDetect(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") - require.NoError(t, err) - require.Len(t, matches, 1) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} - -func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl") - require.NoError(t, err) - require.Len(t, matches, 1) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} - -func TestPythonWheelWithDBFSLib(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} - -func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - mockFiler := mockfiler.NewMockFiler(t) - mockFiler.EXPECT().Write( - mock.Anything, - filepath.Join("my_test_code-0.0.1-py3-none-any.whl"), - mock.AnythingOfType("*os.File"), - filer.OverwriteIfExists, - filer.CreateParentDirectories, - ).Return(nil) - - diags = bundle.ApplySeq(ctx, b, - libraries.ExpandGlobReferences(), - libraries.UploadWithClient(mockFiler), - ) - require.NoError(t, diags.Error()) - require.Empty(t, diags) - require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl) -} - -func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) { - b := loadTarget(t, "./python_wheel/environment_key", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl") - require.NoError(t, err) - require.Len(t, matches, 1) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) 
-} - -func TestPythonWheelBuildMultiple(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl") - require.NoError(t, err) - require.Len(t, matches, 2) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} - -func TestPythonWheelNoBuild(t *testing.T) { - b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default") - - ctx := context.Background() - diags := phases.Build(ctx, b) - require.NoError(t, diags.Error()) - - match := libraries.ExpandGlobReferences() - diags = bundle.Apply(ctx, b, match) - require.NoError(t, diags.Error()) -} diff --git a/bundle/tests/sync_include_exclude_no_matches_test.go b/bundle/tests/sync_include_exclude_no_matches_test.go index c206e7471..ed929518a 100644 --- a/bundle/tests/sync_include_exclude_no_matches_test.go +++ b/bundle/tests/sync_include_exclude_no_matches_test.go @@ -16,7 +16,7 @@ import ( func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { b := loadTarget(t, "./sync/override", "development") - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns()) + diags := bundle.Apply(context.Background(), b, validate.ValidateSyncPatterns()) require.Len(t, diags, 3) require.NoError(t, diags.Error()) @@ -46,7 +46,7 @@ func TestSyncIncludeExcludeNoMatchesTest(t *testing.T) { func TestSyncIncludeWithNegate(t *testing.T) { b := loadTarget(t, "./sync/negate", "default") - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns()) + diags := bundle.Apply(context.Background(), b, validate.ValidateSyncPatterns()) require.Empty(t, diags) require.NoError(t, diags.Error()) } @@ -54,7 +54,7 @@ func TestSyncIncludeWithNegate(t *testing.T) { func TestSyncIncludeWithNegateNoMatches(t *testing.T) { b := loadTarget(t, "./sync/negate", "dev") - diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), validate.ValidateSyncPatterns()) + diags := bundle.Apply(context.Background(), b, validate.ValidateSyncPatterns()) require.Len(t, diags, 1) require.NoError(t, diags.Error()) diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 3ada07b74..25475206d 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -25,7 +25,7 @@ type syncFlags struct { } func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { - opts, err := files.GetSyncOptions(cmd.Context(), bundle.ReadOnly(b)) + opts, err := files.GetSyncOptions(cmd.Context(), b) if err != nil { return nil, fmt.Errorf("cannot get sync options: %w", err) } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 0ff9c7867..0a902806f 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -50,7 +50,7 @@ func newValidateCommand() *cobra.Command { } if !diags.HasError() { - diags = diags.Extend(bundle.Apply(ctx, b, validate.Validate())) + diags = diags.Extend(validate.Validate(ctx, b)) } switch root.OutputType(cmd) { diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index dea40f96a..0f4a4aacc 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -33,7 +33,7 @@ func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, args []string, b * return nil, errors.New("SRC and DST are not configurable in the context of a bundle") } - opts, err 
:= files.GetSyncOptions(cmd.Context(), bundle.ReadOnly(b)) + opts, err := files.GetSyncOptions(cmd.Context(), b) if err != nil { return nil, fmt.Errorf("cannot get sync options: %w", err) } diff --git a/integration/bundle/init_default_python_test.go b/integration/bundle/init_default_python_test.go index ca66491ab..9ac8f47bd 100644 --- a/integration/bundle/init_default_python_test.go +++ b/integration/bundle/init_default_python_test.go @@ -31,8 +31,8 @@ var pythonVersionsShort = []string{ } var extraInstalls = map[string][]string{ - "3.12": {"setuptools"}, - "3.13": {"setuptools"}, + "3.12": {"setuptools==75.8.2"}, + "3.13": {"setuptools==75.8.2"}, } func TestDefaultPython(t *testing.T) { diff --git a/integration/bundle/testdata/apps/bundle_deploy.txt b/integration/bundle/testdata/apps/bundle_deploy.txt index 437a55596..55b8dedc6 100644 --- a/integration/bundle/testdata/apps/bundle_deploy.txt +++ b/integration/bundle/testdata/apps/bundle_deploy.txt @@ -2,3 +2,7 @@ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ/files. Deploying resources... Updating deployment state... Deployment complete! +Warning: App config section detected + +remove 'config' from app resource 'test_app' section and use app.yml file in the root of this app instead + diff --git a/integration/bundle/testdata/apps/bundle_validate.txt b/integration/bundle/testdata/apps/bundle_validate.txt index 567fafd24..cb37fc4bd 100644 --- a/integration/bundle/testdata/apps/bundle_validate.txt +++ b/integration/bundle/testdata/apps/bundle_validate.txt @@ -1,7 +1,11 @@ +Warning: App config section detected + +remove 'config' from app resource 'test_app' section and use app.yml file in the root of this app instead + Name: basic Target: default Workspace: User: [USERNAME] Path: /Workspace/Users/[USERNAME]/.bundle/$UNIQUE_PRJ -Validation OK! +Found 1 warning diff --git a/integration/cmd/auth/describe_test.go b/integration/cmd/auth/describe_test.go index f592bc276..66b83b557 100644 --- a/integration/cmd/auth/describe_test.go +++ b/integration/cmd/auth/describe_test.go @@ -2,6 +2,8 @@ package auth_test import ( "context" + "regexp" + "strings" "testing" "github.com/databricks/cli/internal/testcli" @@ -10,8 +12,6 @@ import ( ) func TestAuthDescribeSuccess(t *testing.T) { - t.Skipf("Skipping because of https://github.com/databricks/cli/issues/2010") - ctx := context.Background() stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "auth", "describe") outStr := stdout.String() @@ -20,7 +20,9 @@ func TestAuthDescribeSuccess(t *testing.T) { require.NoError(t, err) require.NotEmpty(t, outStr) - require.Contains(t, outStr, "Host: "+w.Config.Host) + + hostWithoutPrefix := strings.TrimPrefix(w.Config.Host, "https://") + require.Regexp(t, "Host: (?:https://)?"+regexp.QuoteMeta(hostWithoutPrefix), outStr) me, err := w.CurrentUser.Me(context.Background()) require.NoError(t, err) diff --git a/libs/auth/env.go b/libs/auth/env.go index 6b53d22a6..273c70331 100644 --- a/libs/auth/env.go +++ b/libs/auth/env.go @@ -47,6 +47,11 @@ func GetEnvFor(name string) (string, bool) { return "", false } +// EnvVars returns the list of environment variables that the SDK reads to configure +// authentication. +// This is useful for spawning subprocesses since you can unset all auth environment +// variables to clean up the environment before configuring authentication for the +// child process. 
func envVars() []string { out := []string{} @@ -55,7 +60,7 @@ func envVars() []string { continue } - out = append(out, attr.EnvVars[0]) + out = append(out, attr.EnvVars...) } return out diff --git a/libs/auth/env_test.go b/libs/auth/env_test.go index e04153ed3..5aa893e11 100644 --- a/libs/auth/env_test.go +++ b/libs/auth/env_test.go @@ -3,7 +3,6 @@ package auth import ( "testing" - "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" ) @@ -82,71 +81,49 @@ func TestGetEnvFor(t *testing.T) { } func TestAuthEnvVars(t *testing.T) { - expected := []string{ + // Few common environment variables that we expect the SDK to support. + contains := []string{ + // Generic attributes. "DATABRICKS_HOST", - "DATABRICKS_CLUSTER_ID", - "DATABRICKS_WAREHOUSE_ID", - "DATABRICKS_SERVERLESS_COMPUTE_ID", - "DATABRICKS_METADATA_SERVICE_URL", - "DATABRICKS_ACCOUNT_ID", - "DATABRICKS_TOKEN", - "DATABRICKS_USERNAME", - "DATABRICKS_PASSWORD", "DATABRICKS_CONFIG_PROFILE", + "DATABRICKS_AUTH_TYPE", + "DATABRICKS_METADATA_SERVICE_URL", "DATABRICKS_CONFIG_FILE", + + // OAuth specific attributes. + "DATABRICKS_CLIENT_ID", + "DATABRICKS_CLIENT_SECRET", + "DATABRICKS_CLI_PATH", + + // Google specific attributes. "DATABRICKS_GOOGLE_SERVICE_ACCOUNT", "GOOGLE_CREDENTIALS", + + // Personal access token specific attributes. + "DATABRICKS_TOKEN", + + // Databricks password specific attributes. + "DATABRICKS_USERNAME", + "DATABRICKS_PASSWORD", + + // Account authentication attributes. + "DATABRICKS_ACCOUNT_ID", + + // Azure attributes "DATABRICKS_AZURE_RESOURCE_ID", "ARM_USE_MSI", "ARM_CLIENT_SECRET", "ARM_CLIENT_ID", "ARM_TENANT_ID", + "ARM_ENVIRONMENT", + + // Github attributes "ACTIONS_ID_TOKEN_REQUEST_URL", "ACTIONS_ID_TOKEN_REQUEST_TOKEN", - "ARM_ENVIRONMENT", - "DATABRICKS_AZURE_LOGIN_APP_ID", - "DATABRICKS_CLIENT_ID", - "DATABRICKS_CLIENT_SECRET", - "DATABRICKS_CLI_PATH", - "DATABRICKS_AUTH_TYPE", - "DATABRICKS_DEBUG_TRUNCATE_BYTES", - "DATABRICKS_DEBUG_HEADERS", - "DATABRICKS_RATE_LIMIT", } out := envVars() - assert.Equal(t, expected, out) -} - -func TestAuthProcessEnv(t *testing.T) { - testutil.ClearEnvironment(t) - - // Environment variables that should be inherited by child processes. - t.Setenv("HOME", "/home/user") - t.Setenv("HTTPS_PROXY", "http://proxy.com") - - // Environment variables that should be cleaned up by ProcessEnv(): - t.Setenv("DATABRICKS_HOST", "https://test.com") - t.Setenv("DATABRICKS_TOKEN", "test-token") - t.Setenv("DATABRICKS_PASSWORD", "test-password") - t.Setenv("DATABRICKS_METADATA_SERVICE_URL", "http://somurl.com") - t.Setenv("ARM_USE_MSI", "true") - t.Setenv("ARM_TENANT_ID", "test-tenant-id") - t.Setenv("ARM_CLIENT_ID", "test-client-id") - t.Setenv("ARM_CLIENT_SECRET", "test-client-secret") - - in := &config.Config{ - Host: "https://newhost.com", - Token: "new-token", + for _, v := range contains { + assert.Contains(t, out, v) } - - expected := []string{ - "DATABRICKS_HOST=https://newhost.com", - "DATABRICKS_TOKEN=new-token", - "HOME=/home/user", - "HTTPS_PROXY=http://proxy.com", - } - - out := ProcessEnv(in) - assert.Equal(t, expected, out) } diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index 48b76ce42..7d369a8a0 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -9,6 +9,7 @@ import ( "io" "os" "strings" + "sync" "github.com/databricks/cli/libs/flags" "github.com/manifoldco/promptui" @@ -29,6 +30,8 @@ type Logger struct { // If true, indicates no events have been printed by the logger yet. 
Used // by inplace logging for formatting isFirstEvent bool + + mutex sync.Mutex } func NewLogger(mode flags.ProgressLogFormat) *Logger { @@ -216,6 +219,8 @@ func (l *Logger) writeInplace(event Event) { } func (l *Logger) Log(event Event) { + l.mutex.Lock() + defer l.mutex.Unlock() switch l.Mode { case flags.ModeInplace: if event.IsInplaceSupported() {
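Note (not part of the patch): a minimal sketch, under the assumptions visible in this diff, of how a read-only validation mutator is written and wired up after this refactor: the struct embeds bundle.RO to mark it as safe for bundle.ApplyParallel, and Apply takes a plain *bundle.Bundle instead of the removed ReadOnlyBundle. The mutator name exampleCheck and its warning are hypothetical; only the types and helpers shown elsewhere in this patch (bundle.RO, bundle.ReadOnlyMutator, bundle.ApplyParallel, diag.Diagnostics) are assumed to exist.

package validate

import (
	"context"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
)

// exampleCheck is a hypothetical read-only validation. Embedding bundle.RO tags it
// as a ReadOnlyMutator, which is what ApplyParallel requires: the mutator may read
// b.Config but must not mutate the bundle, since no config sync is performed.
type exampleCheck struct{ bundle.RO }

func ExampleCheck() bundle.ReadOnlyMutator {
	return &exampleCheck{}
}

func (v *exampleCheck) Name() string {
	return "validate:example_check"
}

func (v *exampleCheck) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// Read-only inspection of the configuration; no writes to b.
	if len(b.Config.Sync.Paths) == 0 {
		return diag.Diagnostics{{
			Severity: diag.Warning,
			Summary:  "No sync paths configured",
		}}
	}
	return nil
}

// A caller would then combine it with the other read-only checks, e.g.:
//
//	diags := bundle.ApplyParallel(ctx, b, ExampleCheck(), FilesToSync())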