This commit is contained in:
Shreyas Goenka 2025-03-03 19:21:18 +01:00
commit 898b2c1cc3
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
124 changed files with 872 additions and 670 deletions

View File

@ -1,6 +1,10 @@
## Changes ## Changes
<!-- Summary of your changes that are easy to understand --> <!-- Brief summary of your changes that is easy to understand -->
## Why
<!-- Why are these changes needed? Provide the context that the reviewer might be missing.
For example, were there any decisions behind the change that are not reflected in the code itself? -->
## Tests ## Tests
<!-- How is this tested? --> <!-- How have you tested the changes? -->

View File

@ -53,7 +53,7 @@ jobs:
go-version-file: go.mod go-version-file: go.mod
- name: Setup Python - name: Setup Python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
with: with:
python-version: '3.9' python-version: '3.9'
@ -95,7 +95,7 @@ jobs:
# Exit with status code 1 if there are differences (i.e. unformatted files) # Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code git diff --exit-code
- name: golangci-lint - name: golangci-lint
uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0 uses: golangci/golangci-lint-action@2226d7cb06a077cd73e56eedd38eecad18e5d837 # v6.5.0
with: with:
version: v1.63.4 version: v1.63.4
args: --timeout=15m args: --timeout=15m

View File

@ -54,21 +54,21 @@ jobs:
args: release --snapshot --skip docker args: release --snapshot --skip docker
- name: Upload macOS binaries - name: Upload macOS binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with: with:
name: cli_darwin_snapshot name: cli_darwin_snapshot
path: | path: |
dist/*_darwin_*/ dist/*_darwin_*/
- name: Upload Linux binaries - name: Upload Linux binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with: with:
name: cli_linux_snapshot name: cli_linux_snapshot
path: | path: |
dist/*_linux_*/ dist/*_linux_*/
- name: Upload Windows binaries - name: Upload Windows binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with: with:
name: cli_windows_snapshot name: cli_windows_snapshot
path: | path: |

View File

@ -46,7 +46,7 @@ jobs:
# QEMU is required to build cross platform docker images using buildx. # QEMU is required to build cross platform docker images using buildx.
# It allows virtualization of the CPU architecture at the application level. # It allows virtualization of the CPU architecture at the application level.
- name: Set up QEMU dependency - name: Set up QEMU dependency
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0 uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Run GoReleaser - name: Run GoReleaser
id: releaser id: releaser

View File

@ -390,6 +390,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if _, ok := Ignored[relPath]; ok { if _, ok := Ignored[relPath]; ok {
continue continue
} }
if config.CompiledIgnoreObject.MatchesPath(relPath) {
continue
}
unexpected = append(unexpected, relPath) unexpected = append(unexpected, relPath)
if strings.HasPrefix(relPath, "out") { if strings.HasPrefix(relPath, "out") {
// We have a new file starting with "out" // We have a new file starting with "out"

36
acceptance/bin/find.py Executable file
View File

@ -0,0 +1,36 @@
#!/usr/bin/env python3
"""
Usage: find.py <regex>

Finds all files within the current directory whose relative path matches regex.
The output is sorted and slashes are always forward.
If --expect N is provided, the number of matches must be N or an error is printed.
"""

import sys
import os
import re
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("regex", help="regular expression matched against each relative path")
parser.add_argument("--expect", type=int, help="exact number of matches required")
args = parser.parse_args()

regex = re.compile(args.regex)
result = []
for root, dirs, files in os.walk("."):
    for filename in files:
        # Use relpath rather than lstrip("./\\"): lstrip removes a *set of
        # characters*, so it would also eat the leading dot of hidden files
        # (e.g. "./.venv/x" -> "venv/x"), corrupting both matching and output.
        path = os.path.relpath(os.path.join(root, filename)).replace("\\", "/")
        if regex.search(path):
            result.append(path)

# Sort for deterministic output across platforms and filesystems.
result.sort()
for item in result:
    print(item)
sys.stdout.flush()

if args.expect is not None:
    if args.expect != len(result):
        # sys.exit with a string prints it to stderr and exits with code 1.
        sys.exit(f"Expected {args.expect}, got {len(result)}")

View File

@ -0,0 +1,3 @@
command:
- python
- app.py

View File

@ -0,0 +1,8 @@
bundle:
name: apps_yaml
resources:
apps:
myapp:
name: myapp
source_code_path: ./app

View File

@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}

View File

@ -0,0 +1,15 @@
>>> [CLI] bundle validate
Name: apps_yaml
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default
Validation OK!
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

View File

@ -0,0 +1,4 @@
# Validate, then deploy the bundle; both invocations are traced into the output.
trace $CLI bundle validate
trace $CLI bundle deploy
# Extract the recorded request that uploaded app.yml into its own golden file.
# The sed strips literal "\r" escape sequences from the JSON-encoded body so the
# result matches on Windows too (CRLF line endings in the uploaded file).
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml")' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt
# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -0,0 +1 @@
print("Hello world!")

View File

@ -0,0 +1,12 @@
bundle:
name: apps_config_section
resources:
apps:
myapp:
name: myapp
source_code_path: ./app
config:
command:
- python
- app.py

View File

@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}

View File

@ -0,0 +1,23 @@
>>> [CLI] bundle validate
Warning: App config section detected
remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead
Name: apps_config_section
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default
Found 1 warning
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Warning: App config section detected
remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead

View File

@ -0,0 +1,4 @@
# Validate, then deploy; both are expected to emit the "App config section" warning.
trace $CLI bundle validate
trace $CLI bundle deploy
# Extract the recorded request that uploaded the generated app.yml into a golden file.
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml")' out.requests.txt > out.app.yml.txt
# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -0,0 +1,26 @@
# Run against the local stub server only, never a real workspace.
Cloud = false
# Record the CLI's HTTP requests (read by the test script as out.requests.txt).
RecordRequests = true

# Gitignore-style patterns for generated files that must not be flagged
# as unexpected test output.
Ignore = [
'.databricks',
]

# Stub for app creation. NOTE(review): no Response.Body here — presumably the
# server's default reply is used; confirm against the stub server implementation.
[[Server]]
Pattern = "POST /api/2.0/apps"

# Stub the app status poll with an app that is already active and running,
# so deploy does not block waiting for compute to start.
[[Server]]
Pattern = "GET /api/2.0/apps/myapp"
Response.Body = '''
{
"name": "myapp",
"description": "",
"compute_status": {
"state": "ACTIVE",
"message": "App compute is active."
},
"app_status": {
"state": "RUNNING",
"message": "Application is running."
}
}
'''

View File

@ -1,3 +1,5 @@
RecordRequests = false
[[Repls]] [[Repls]]
Old = '\\' Old = '\\'
New = '/' New = '/'

View File

@ -0,0 +1,9 @@
# Don't write .pyc files during the run — keeps the output tree free of
# __pycache__ noise that would show up as unexpected files.
export PYTHONDONTWRITEBYTECODE=1
# Create a quiet virtualenv pinned to Python 3.12.
uv venv -q --python 3.12 .venv
# Windows virtualenvs place the activate script under Scripts/, POSIX under bin/.
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
source .venv/Scripts/activate
else
source .venv/bin/activate
fi
# setuptools is required by the setup.py-based wheel builds in these tests.
uv pip install -q setuptools

View File

@ -0,0 +1,18 @@
# Run against the local stub server only, never a real workspace.
Cloud = false
# Record the CLI's HTTP requests (read by the test scripts as out.requests.txt).
RecordRequests = true

# Gitignore-style patterns for build artifacts of the wheel builds that must
# not be flagged as unexpected test output.
Ignore = [
'.venv',
'dist',
'build',
'*egg-info',
'.databricks',
]

# Stub the cluster lookup so validation of existing_cluster_id succeeds offline.
[[Server]]
Pattern = "GET /api/2.1/clusters/get"
Response.Body = '''
{
"cluster_id": "0717-132531-5opeqon1",
"spark_version": "13.3.x-scala2.12"
}
'''

View File

@ -2,8 +2,8 @@
>>> errcode [CLI] bundle deploy >>> errcode [CLI] bundle deploy
Building whl1... Building whl1...
Building whl2... Building whl2...
Uploading [package name] Uploading [package name]...
Uploading [package name] Uploading [package name]...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files... Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
Deploying resources... Deploying resources...
Updating deployment state... Updating deployment state...

View File

@ -1,6 +1,6 @@
Cloud = false RecordRequests = false
# The order in which files are uploaded can be different, so we just replace the name # The order in which files are uploaded can be different, so we just replace the name
[[Repls]] [[Repls]]
Old="Uploading .*-0.0.1-py3-none-any.whl..." Old="Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl"
New="Uploading [package name]" New="Uploading [package name]"

View File

@ -0,0 +1,32 @@
>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
=== Expecting to find no wheels
>>> errcode find.py --expect 0 whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "dbfs:/path/to/dist/mywheel.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting no wheels to be uploaded
>>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl
Exit code: 1

View File

@ -0,0 +1,12 @@
trace $CLI bundle deploy

title "Expecting to find no wheels"
# errcode: find.py exits non-zero when --expect is not met; capture instead of abort.
trace errcode find.py --expect 0 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting no wheels to be uploaded"
# grep exits 1 when nothing matches — that is the expected (asserted) outcome here.
trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl'

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -5,7 +5,8 @@ artifacts:
my_test_code: my_test_code:
type: whl type: whl
path: "./my_test_code" path: "./my_test_code"
build: "python3 setup.py bdist_wheel" # using 'python' there because 'python3' does not exist in virtualenv on windows
build: python setup.py bdist_wheel
resources: resources:
jobs: jobs:

View File

@ -0,0 +1,34 @@
>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Deploy, then assert exactly one wheel was built locally.
trace $CLI bundle deploy
trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -0,0 +1,34 @@
>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Deploy, then assert exactly one wheel was built locally.
trace $CLI bundle deploy
trace find.py --expect 1 whl

title "Expecting 1 wheels in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheels to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -0,0 +1,46 @@
>>> [CLI] bundle deploy
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
package/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "[default] My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files/package/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Deploy, then assert exactly one (pre-built) wheel is present locally.
trace $CLI bundle deploy
trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt

title "Expecting 1 wheel to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -0,0 +1,33 @@
>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"notebook_task": {
"notebook_path": "/notebook.py"
},
"task_key": "TestTask"
}
]
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Deploy, then assert exactly one wheel was built locally.
trace $CLI bundle deploy
trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -5,11 +5,11 @@ artifacts:
my_test_code: my_test_code:
type: whl type: whl
path: "./my_test_code" path: "./my_test_code"
build: "python3 setup.py bdist_wheel" build: "python setup.py bdist_wheel"
my_test_code_2: my_test_code_2:
type: whl type: whl
path: "./my_test_code" path: "./my_test_code"
build: "python3 setup2.py bdist_wheel" build: "python setup2.py bdist_wheel"
resources: resources:
jobs: jobs:

View File

@ -0,0 +1,42 @@
>>> [CLI] bundle deploy
Building my_test_code...
Building my_test_code_2...
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading my_test_code_2-0.0.1-py3-none-any.whl...
>>> find.py --expect 2 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl
=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Sort the deploy output: the two 'Uploading ...whl...' messages can appear
# in either order.
trace $CLI bundle deploy 2>&1 | sort
trace find.py --expect 2 whl

title "Expecting 2 wheels in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 2 wheels to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Plain 'rm' for consistency with the sibling scripts ('-fr' is unnecessary:
# the file is always created when RecordRequests is enabled, and it is a file,
# not a directory).
rm out.requests.txt

View File

@ -13,4 +13,4 @@ resources:
entry_point: "run" entry_point: "run"
libraries: libraries:
- whl: ./dist/*.whl - whl: ./dist/*.whl
- whl: ./dist/lib/my_test_code-0.0.1-py3-none-any.whl - whl: ./dist/lib/other_test_code-0.0.1-py3-none-any.whl

View File

@ -0,0 +1,45 @@
>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl
>>> [CLI] bundle deploy
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading other_test_code-0.0.1-py3-none-any.whl...
=== Expecting to find 2 wheels, same as initially provided
>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,14 @@
# Record the pre-existing wheels before deploying.
trace find.py --expect 2 whl
# Sort the deploy output: the 'Uploading ...whl...' messages can change order.
trace $CLI bundle deploy 2>&1 | sort

title "Expecting to find 2 wheels, same as initially provided"
# Deploy must not have built or removed any wheels.
trace find.py --expect 2 whl

title "Expecting 2 wheels in libraries section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 2 wheels to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -5,7 +5,7 @@ artifacts:
my_test_code: my_test_code:
type: whl type: whl
path: "./my_test_code" path: "./my_test_code"
build: "python3 setup.py bdist_wheel" build: python setup.py bdist_wheel
resources: resources:
jobs: jobs:

View File

@ -0,0 +1,54 @@
>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/environment_key/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
=== Expecting 1 wheel in environments section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/environment_key/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"environments": [
{
"environment_key": "test_env",
"spec": {
"client": "1",
"dependencies": [
"/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
]
}
}
],
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"environment_key": "test_env",
"existing_cluster_id": "0717-132531-5opeqon1",
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}
=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"

View File

@ -0,0 +1,11 @@
# Deploy, then assert exactly one wheel was built locally.
trace $CLI bundle deploy
trace find.py --expect 1 whl

title "Expecting 1 wheel in environments section in /jobs/create"
# out.requests.txt is produced by the runner when RecordRequests is enabled.
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt

title "Expecting 1 wheel to be uploaded"
# import-file requests for .whl paths correspond to uploads; sort for determinism.
trace jq .path < out.requests.txt | grep import | grep whl | sort

# Remove the raw recording so it is not itself treated as test output.
rm out.requests.txt

View File

@ -1,15 +0,0 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true

View File

@ -72,18 +72,30 @@
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize 10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit 10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit 10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate 10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly)
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:files_to_sync
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:folder_permissions
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:validate_sync_patterns
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_cluster_key_defined
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_task_cluster_spec
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:SingleNodeCluster
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:artifact_paths
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found < HTTP/1.1 404 Not Found
< { < {
< "message": "Workspace path not found" < "message": "Workspace path not found"
< } pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: POST /api/2.0/workspace/mkdirs 10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> { > {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files" > "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> } > }
< HTTP/1.1 200 OK pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK < HTTP/1.1 200 OK
< { < {
< "object_type": "DIRECTORY", < "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files" < "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
< } pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate:files_to_sync
10:07:59 Info: completed execution pid=12345 exit_code=0 10:07:59 Info: completed execution pid=12345 exit_code=0

View File

@ -1,4 +1 @@
$CLI bundle validate --debug 2> full.stderr.txt $CLI bundle validate --debug 2> out.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt

View File

@ -2,10 +2,8 @@ Warning: Include section is defined outside root file
at include at include
in a.yml:2:3 in a.yml:2:3
The include section is defined in a file that is not the root file. An include section is defined in a file that is not databricks.yml.
These values will be ignored because only the includes defined in Only includes defined in databricks.yml are applied.
the bundle root file (that is databricks.yml or databricks.yaml)
are loaded.
Name: include_outside_root Name: include_outside_root
Target: default Target: default

View File

@ -11,6 +11,7 @@ import (
"github.com/BurntSushi/toml" "github.com/BurntSushi/toml"
"github.com/databricks/cli/libs/testdiff" "github.com/databricks/cli/libs/testdiff"
"github.com/databricks/cli/libs/testserver" "github.com/databricks/cli/libs/testserver"
ignore "github.com/sabhiram/go-gitignore"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
@ -51,6 +52,11 @@ type TestConfig struct {
// List of request headers to include when recording requests. // List of request headers to include when recording requests.
IncludeRequestHeaders []string IncludeRequestHeaders []string
// List of gitignore patterns to ignore when checking output files
Ignore []string
CompiledIgnoreObject *ignore.GitIgnore
} }
type ServerStub struct { type ServerStub struct {
@ -111,6 +117,8 @@ func LoadConfig(t *testing.T, dir string) (TestConfig, string) {
} }
} }
result.CompiledIgnoreObject = ignore.CompileIgnoreLines(result.Ignore...)
return result, strings.Join(configs, ", ") return result, strings.Join(configs, ", ")
} }

View File

@ -27,3 +27,7 @@ echo 123456
printf "\n=== Testing --version" printf "\n=== Testing --version"
trace $CLI --version trace $CLI --version
touch ignored_file.txt
mkdir ignored_dir
touch ignored_dir/hello.txt

View File

@ -1,5 +1,8 @@
# Badness = "Brief description of what's wrong with the test output, if anything" # Badness = "Brief description of what's wrong with the test output, if anything"
Ignore = ['ignore*']
#[GOOS] #[GOOS]
# Disable on Windows # Disable on Windows
#windows = false #windows = false

View File

@ -3,8 +3,6 @@ package apps
import ( import (
"context" "context"
"fmt" "fmt"
"path"
"strings"
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/diag"
@ -14,7 +12,6 @@ type validate struct{}
func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
var diags diag.Diagnostics var diags diag.Diagnostics
possibleConfigFiles := []string{"app.yml", "app.yaml"}
usedSourceCodePaths := make(map[string]string) usedSourceCodePaths := make(map[string]string)
for key, app := range b.Config.Resources.Apps { for key, app := range b.Config.Resources.Apps {
@ -28,18 +25,14 @@ func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics
} }
usedSourceCodePaths[app.SourceCodePath] = key usedSourceCodePaths[app.SourceCodePath] = key
for _, configFile := range possibleConfigFiles { if app.Config != nil {
appPath := strings.TrimPrefix(app.SourceCodePath, b.Config.Workspace.FilePath)
cf := path.Join(appPath, configFile)
if _, err := b.SyncRoot.Stat(cf); err == nil {
diags = append(diags, diag.Diagnostic{ diags = append(diags, diag.Diagnostic{
Severity: diag.Error, Severity: diag.Warning,
Summary: configFile + " detected", Summary: "App config section detected",
Detail: fmt.Sprintf("remove %s and use 'config' property for app resource '%s' instead", cf, app.Name), Detail: fmt.Sprintf("remove 'config' from app resource '%s' section and use app.yml file in the root of this app instead", key),
}) })
} }
} }
}
return diags return diags
} }

View File

@ -17,46 +17,6 @@ import (
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
) )
func TestAppsValidate(t *testing.T) {
tmpDir := t.TempDir()
testutil.Touch(t, tmpDir, "app1", "app.yml")
testutil.Touch(t, tmpDir, "app2", "app.py")
b := &bundle.Bundle{
BundleRootPath: tmpDir,
SyncRootPath: tmpDir,
SyncRoot: vfs.MustNew(tmpDir),
Config: config.Root{
Workspace: config.Workspace{
FilePath: "/foo/bar/",
},
Resources: config.Resources{
Apps: map[string]*resources.App{
"app1": {
App: &apps.App{
Name: "app1",
},
SourceCodePath: "./app1",
},
"app2": {
App: &apps.App{
Name: "app2",
},
SourceCodePath: "./app2",
},
},
},
},
}
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
require.Len(t, diags, 1)
require.Equal(t, "app.yml detected", diags[0].Summary)
require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
}
func TestAppsValidateSameSourcePath(t *testing.T) { func TestAppsValidateSameSourcePath(t *testing.T) {
tmpDir := t.TempDir() tmpDir := t.TempDir()
testutil.Touch(t, tmpDir, "app1", "app.py") testutil.Touch(t, tmpDir, "app1", "app.py")

View File

@ -1,49 +0,0 @@
package bundle
import (
"context"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/vfs"
"github.com/databricks/databricks-sdk-go"
)
type ReadOnlyBundle struct {
b *Bundle
}
func ReadOnly(b *Bundle) ReadOnlyBundle {
return ReadOnlyBundle{b: b}
}
func (r ReadOnlyBundle) Config() config.Root {
return r.b.Config
}
func (r ReadOnlyBundle) RootPath() string {
return r.b.BundleRootPath
}
func (r ReadOnlyBundle) BundleRoot() vfs.Path {
return r.b.BundleRoot
}
func (r ReadOnlyBundle) SyncRoot() vfs.Path {
return r.b.SyncRoot
}
func (r ReadOnlyBundle) WorktreeRoot() vfs.Path {
return r.b.WorktreeRoot
}
func (r ReadOnlyBundle) WorkspaceClient() *databricks.WorkspaceClient {
return r.b.WorkspaceClient()
}
func (r ReadOnlyBundle) CacheDir(ctx context.Context, paths ...string) (string, error) {
return r.b.CacheDir(ctx, paths...)
}
func (r ReadOnlyBundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) {
return r.b.GetSyncIncludePatterns(ctx)
}

View File

@ -165,10 +165,8 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
diags = diags.Append(diag.Diagnostic{ diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning, Severity: diag.Warning,
Summary: "Include section is defined outside root file", Summary: "Include section is defined outside root file",
Detail: `The include section is defined in a file that is not the root file. Detail: `An include section is defined in a file that is not databricks.yml.
These values will be ignored because only the includes defined in Only includes defined in databricks.yml are applied.`,
the bundle root file (that is databricks.yml or databricks.yaml)
are loaded.`,
Locations: this.GetLocations("include"), Locations: this.GetLocations("include"),
Paths: []dyn.Path{dyn.MustPathFromString("include")}, Paths: []dyn.Path{dyn.MustPathFromString("include")},
}) })

View File

@ -14,18 +14,18 @@ import (
// 2. The validation is blocking for bundle deployments. // 2. The validation is blocking for bundle deployments.
// //
// The full suite of validation mutators is available in the [Validate] mutator. // The full suite of validation mutators is available in the [Validate] mutator.
type fastValidateReadonly struct{} type fastValidate struct{ bundle.RO }
func FastValidateReadonly() bundle.ReadOnlyMutator { func FastValidate() bundle.ReadOnlyMutator {
return &fastValidateReadonly{} return &fastValidate{}
} }
func (f *fastValidateReadonly) Name() string { func (f *fastValidate) Name() string {
return "fast_validate(readonly)" return "fast_validate(readonly)"
} }
func (f *fastValidateReadonly) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (f *fastValidate) Apply(ctx context.Context, rb *bundle.Bundle) diag.Diagnostics {
return bundle.ApplyReadOnly(ctx, rb, bundle.Parallel( return bundle.ApplyParallel(ctx, rb,
// Fast mutators with only in-memory checks // Fast mutators with only in-memory checks
JobClusterKeyDefined(), JobClusterKeyDefined(),
JobTaskClusterSpec(), JobTaskClusterSpec(),
@ -33,19 +33,5 @@ func (f *fastValidateReadonly) Apply(ctx context.Context, rb bundle.ReadOnlyBund
// Blocking mutators. Deployments will fail if these checks fail. // Blocking mutators. Deployments will fail if these checks fail.
ValidateArtifactPath(), ValidateArtifactPath(),
)) )
}
type fastValidate struct{}
func FastValidate() bundle.Mutator {
return &fastValidate{}
}
func (f *fastValidate) Name() string {
return "fast_validate"
}
func (f *fastValidate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), FastValidateReadonly())
} }

View File

@ -13,20 +13,20 @@ func FilesToSync() bundle.ReadOnlyMutator {
return &filesToSync{} return &filesToSync{}
} }
type filesToSync struct{} type filesToSync struct{ bundle.RO }
func (v *filesToSync) Name() string { func (v *filesToSync) Name() string {
return "validate:files_to_sync" return "validate:files_to_sync"
} }
func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (v *filesToSync) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// The user may be intentional about not synchronizing any files. // The user may be intentional about not synchronizing any files.
// In this case, we should not show any warnings. // In this case, we should not show any warnings.
if len(rb.Config().Sync.Paths) == 0 { if len(b.Config.Sync.Paths) == 0 {
return nil return nil
} }
sync, err := files.GetSync(ctx, rb) sync, err := files.GetSync(ctx, b)
if err != nil { if err != nil {
return diag.FromErr(err) return diag.FromErr(err)
} }
@ -42,20 +42,20 @@ func (v *filesToSync) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.
} }
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
if len(rb.Config().Sync.Exclude) == 0 { if len(b.Config.Sync.Exclude) == 0 {
diags = diags.Append(diag.Diagnostic{ diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning, Severity: diag.Warning,
Summary: "There are no files to sync, please check your .gitignore", Summary: "There are no files to sync, please check your .gitignore",
}) })
} else { } else {
loc := location{path: "sync.exclude", rb: rb} path := "sync.exclude"
diags = diags.Append(diag.Diagnostic{ diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning, Severity: diag.Warning,
Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration", Summary: "There are no files to sync, please check your .gitignore and sync.exclude configuration",
// Show all locations where sync.exclude is defined, since merging // Show all locations where sync.exclude is defined, since merging
// sync.exclude is additive. // sync.exclude is additive.
Locations: loc.Locations(), Locations: b.Config.GetLocations(path),
Paths: []dyn.Path{loc.Path()}, Paths: []dyn.Path{dyn.MustPathFromString(path)},
}) })
} }

View File

@ -29,8 +29,7 @@ func TestFilesToSync_NoPaths(t *testing.T) {
} }
ctx := context.Background() ctx := context.Background()
rb := bundle.ReadOnly(b) diags := FilesToSync().Apply(ctx, b)
diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync())
assert.Empty(t, diags) assert.Empty(t, diags)
} }
@ -85,8 +84,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) {
testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n") testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n")
ctx := context.Background() ctx := context.Background()
rb := bundle.ReadOnly(b) diags := FilesToSync().Apply(ctx, b)
diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync())
require.Len(t, diags, 1) require.Len(t, diags, 1)
assert.Equal(t, diag.Warning, diags[0].Severity) assert.Equal(t, diag.Warning, diags[0].Severity)
assert.Equal(t, "There are no files to sync, please check your .gitignore", diags[0].Summary) assert.Equal(t, "There are no files to sync, please check your .gitignore", diags[0].Summary)
@ -99,8 +97,7 @@ func TestFilesToSync_EverythingExcluded(t *testing.T) {
b.Config.Sync.Exclude = []string{"*"} b.Config.Sync.Exclude = []string{"*"}
ctx := context.Background() ctx := context.Background()
rb := bundle.ReadOnly(b) diags := FilesToSync().Apply(ctx, b)
diags := bundle.ApplyReadOnly(ctx, rb, FilesToSync())
require.Len(t, diags, 1) require.Len(t, diags, 1)
assert.Equal(t, diag.Warning, diags[0].Severity) assert.Equal(t, diag.Warning, diags[0].Severity)
assert.Equal(t, "There are no files to sync, please check your .gitignore and sync.exclude configuration", diags[0].Summary) assert.Equal(t, "There are no files to sync, please check your .gitignore and sync.exclude configuration", diags[0].Summary)

View File

@ -16,15 +16,14 @@ import (
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
type folderPermissions struct{} type folderPermissions struct{ bundle.RO }
// Apply implements bundle.ReadOnlyMutator. func (f *folderPermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) diag.Diagnostics { if len(b.Config.Permissions) == 0 {
if len(b.Config().Permissions) == 0 {
return nil return nil
} }
bundlePaths := paths.CollectUniqueWorkspacePathPrefixes(b.Config().Workspace) bundlePaths := paths.CollectUniqueWorkspacePathPrefixes(b.Config.Workspace)
var diags diag.Diagnostics var diags diag.Diagnostics
g, ctx := errgroup.WithContext(ctx) g, ctx := errgroup.WithContext(ctx)
@ -48,7 +47,7 @@ func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle)
return diags return diags
} }
func checkFolderPermission(ctx context.Context, b bundle.ReadOnlyBundle, folderPath string) diag.Diagnostics { func checkFolderPermission(ctx context.Context, b *bundle.Bundle, folderPath string) diag.Diagnostics {
// If the folder is shared, then we don't need to check permissions as it was already checked in the other mutator before. // If the folder is shared, then we don't need to check permissions as it was already checked in the other mutator before.
if libraries.IsWorkspaceSharedPath(folderPath) { if libraries.IsWorkspaceSharedPath(folderPath) {
return nil return nil
@ -69,7 +68,7 @@ func checkFolderPermission(ctx context.Context, b bundle.ReadOnlyBundle, folderP
} }
p := permissions.ObjectAclToResourcePermissions(folderPath, objPermissions.AccessControlList) p := permissions.ObjectAclToResourcePermissions(folderPath, objPermissions.AccessControlList)
return p.Compare(b.Config().Permissions) return p.Compare(b.Config.Permissions)
} }
func getClosestExistingObject(ctx context.Context, w workspace.WorkspaceInterface, folderPath string) (*workspace.ObjectInfo, error) { func getClosestExistingObject(ctx context.Context, w workspace.WorkspaceInterface, folderPath string) (*workspace.ObjectInfo, error) {

View File

@ -69,9 +69,7 @@ func TestFolderPermissionsInheritedWhenRootPathDoesNotExist(t *testing.T) {
}, nil) }, nil)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
rb := bundle.ReadOnly(b) diags := ValidateFolderPermissions().Apply(context.Background(), b)
diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions())
require.Empty(t, diags) require.Empty(t, diags)
} }
@ -118,9 +116,7 @@ func TestValidateFolderPermissionsFailsOnMissingBundlePermission(t *testing.T) {
}, nil) }, nil)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
rb := bundle.ReadOnly(b) diags := ValidateFolderPermissions().Apply(context.Background(), b)
diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary) require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary)
require.Equal(t, diag.Warning, diags[0].Severity) require.Equal(t, diag.Warning, diags[0].Severity)
@ -164,9 +160,7 @@ func TestValidateFolderPermissionsFailsOnPermissionMismatch(t *testing.T) {
}, nil) }, nil)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
rb := bundle.ReadOnly(b) diags := ValidateFolderPermissions().Apply(context.Background(), b)
diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary) require.Equal(t, "untracked permissions apply to target workspace path", diags[0].Summary)
require.Equal(t, diag.Warning, diags[0].Severity) require.Equal(t, diag.Warning, diags[0].Severity)
@ -199,9 +193,7 @@ func TestValidateFolderPermissionsFailsOnNoRootFolder(t *testing.T) {
}) })
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
rb := bundle.ReadOnly(b) diags := ValidateFolderPermissions().Apply(context.Background(), b)
diags := bundle.ApplyReadOnly(context.Background(), rb, ValidateFolderPermissions())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "folder / and its parent folders do not exist", diags[0].Summary) require.Equal(t, "folder / and its parent folders do not exist", diags[0].Summary)
require.Equal(t, diag.Error, diags[0].Severity) require.Equal(t, diag.Error, diags[0].Severity)

View File

@ -24,7 +24,10 @@ func (f *noInterpolationInAuthConfig) Name() string {
func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
authFields := []string{ authFields := []string{
// Generic attributes. // Generic attributes.
"host", "profile", "auth_type", "metadata_service_url", "host",
"profile",
"auth_type",
"metadata_service_url",
// OAuth specific attributes. // OAuth specific attributes.
"client_id", "client_id",
@ -33,8 +36,12 @@ func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundl
"google_service_account", "google_service_account",
// Azure specific attributes. // Azure specific attributes.
"azure_resource_id", "azure_use_msi", "azure_client_id", "azure_tenant_id", "azure_resource_id",
"azure_environment", "azure_login_app_id", "azure_use_msi",
"azure_client_id",
"azure_tenant_id",
"azure_environment",
"azure_login_app_id",
} }
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
@ -49,12 +56,11 @@ func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundl
return diag.FromErr(err) return diag.FromErr(err)
} }
if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil { vv, ok := v.AsString()
if !ok {
continue continue
} }
vv := v.MustString()
// Check if the field contains interpolation. // Check if the field contains interpolation.
if dynvar.ContainsVariableReference(vv) { if dynvar.ContainsVariableReference(vv) {
envVar, ok := auth.GetEnvFor(fieldName) envVar, ok := auth.GetEnvFor(fieldName)

View File

@ -13,16 +13,16 @@ func JobClusterKeyDefined() bundle.ReadOnlyMutator {
return &jobClusterKeyDefined{} return &jobClusterKeyDefined{}
} }
type jobClusterKeyDefined struct{} type jobClusterKeyDefined struct{ bundle.RO }
func (v *jobClusterKeyDefined) Name() string { func (v *jobClusterKeyDefined) Name() string {
return "validate:job_cluster_key_defined" return "validate:job_cluster_key_defined"
} }
func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (v *jobClusterKeyDefined) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
for k, job := range rb.Config().Resources.Jobs { for k, job := range b.Config.Resources.Jobs {
jobClusterKeys := make(map[string]bool) jobClusterKeys := make(map[string]bool)
for _, cluster := range job.JobClusters { for _, cluster := range job.JobClusters {
if cluster.JobClusterKey != "" { if cluster.JobClusterKey != "" {
@ -33,10 +33,7 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund
for index, task := range job.Tasks { for index, task := range job.Tasks {
if task.JobClusterKey != "" { if task.JobClusterKey != "" {
if _, ok := jobClusterKeys[task.JobClusterKey]; !ok { if _, ok := jobClusterKeys[task.JobClusterKey]; !ok {
loc := location{ path := fmt.Sprintf("resources.jobs.%s.tasks[%d].job_cluster_key", k, index)
path: fmt.Sprintf("resources.jobs.%s.tasks[%d].job_cluster_key", k, index),
rb: rb,
}
diags = diags.Append(diag.Diagnostic{ diags = diags.Append(diag.Diagnostic{
Severity: diag.Warning, Severity: diag.Warning,
@ -44,8 +41,8 @@ func (v *jobClusterKeyDefined) Apply(ctx context.Context, rb bundle.ReadOnlyBund
// Show only the location where the job_cluster_key is defined. // Show only the location where the job_cluster_key is defined.
// Other associated locations are not relevant since they are // Other associated locations are not relevant since they are
// overridden during merging. // overridden during merging.
Locations: []dyn.Location{loc.Location()}, Locations: b.Config.GetLocations(path),
Paths: []dyn.Path{loc.Path()}, Paths: []dyn.Path{dyn.MustPathFromString(path)},
}) })
} }
} }

View File

@ -33,7 +33,7 @@ func TestJobClusterKeyDefined(t *testing.T) {
}, },
} }
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) diags := JobClusterKeyDefined().Apply(context.Background(), b)
require.Empty(t, diags) require.Empty(t, diags)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
} }
@ -56,7 +56,7 @@ func TestJobClusterKeyNotDefined(t *testing.T) {
}, },
} }
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) diags := JobClusterKeyDefined().Apply(context.Background(), b)
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, diag.Warning, diags[0].Severity) require.Equal(t, diag.Warning, diags[0].Severity)
@ -89,7 +89,7 @@ func TestJobClusterKeyDefinedInDifferentJob(t *testing.T) {
}, },
} }
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobClusterKeyDefined()) diags := JobClusterKeyDefined().Apply(context.Background(), b)
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
require.Equal(t, diag.Warning, diags[0].Severity) require.Equal(t, diag.Warning, diags[0].Severity)

View File

@ -17,31 +17,31 @@ func JobTaskClusterSpec() bundle.ReadOnlyMutator {
return &jobTaskClusterSpec{} return &jobTaskClusterSpec{}
} }
type jobTaskClusterSpec struct{} type jobTaskClusterSpec struct{ bundle.RO }
func (v *jobTaskClusterSpec) Name() string { func (v *jobTaskClusterSpec) Name() string {
return "validate:job_task_cluster_spec" return "validate:job_task_cluster_spec"
} }
func (v *jobTaskClusterSpec) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (v *jobTaskClusterSpec) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs")) jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))
for resourceName, job := range rb.Config().Resources.Jobs { for resourceName, job := range b.Config.Resources.Jobs {
resourcePath := jobsPath.Append(dyn.Key(resourceName)) resourcePath := jobsPath.Append(dyn.Key(resourceName))
for taskIndex, task := range job.Tasks { for taskIndex, task := range job.Tasks {
taskPath := resourcePath.Append(dyn.Key("tasks"), dyn.Index(taskIndex)) taskPath := resourcePath.Append(dyn.Key("tasks"), dyn.Index(taskIndex))
diags = diags.Extend(validateJobTask(rb, task, taskPath)) diags = diags.Extend(validateJobTask(b, task, taskPath))
} }
} }
return diags return diags
} }
func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path) diag.Diagnostics { func validateJobTask(b *bundle.Bundle, task jobs.Task, taskPath dyn.Path) diag.Diagnostics {
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
var specified []string var specified []string
@ -74,7 +74,7 @@ func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path
if task.ForEachTask != nil { if task.ForEachTask != nil {
forEachTaskPath := taskPath.Append(dyn.Key("for_each_task"), dyn.Key("task")) forEachTaskPath := taskPath.Append(dyn.Key("for_each_task"), dyn.Key("task"))
diags = diags.Extend(validateJobTask(rb, task.ForEachTask.Task, forEachTaskPath)) diags = diags.Extend(validateJobTask(b, task.ForEachTask.Task, forEachTaskPath))
} }
if isComputeTask(task) && len(specified) == 0 { if isComputeTask(task) && len(specified) == 0 {
@ -92,7 +92,7 @@ func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path
Severity: diag.Error, Severity: diag.Error,
Summary: "Missing required cluster or environment settings", Summary: "Missing required cluster or environment settings",
Detail: detail, Detail: detail,
Locations: rb.Config().GetLocations(taskPath.String()), Locations: b.Config.GetLocations(taskPath.String()),
Paths: []dyn.Path{taskPath}, Paths: []dyn.Path{taskPath},
}) })
} }

View File

@ -174,7 +174,7 @@ Specify one of the following fields: job_cluster_key, environment_key, existing_
} }
b := createBundle(map[string]*resources.Job{"job1": job}) b := createBundle(map[string]*resources.Job{"job1": job})
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobTaskClusterSpec()) diags := JobTaskClusterSpec().Apply(context.Background(), b)
if tc.errorPath != "" || tc.errorDetail != "" || tc.errorSummary != "" { if tc.errorPath != "" || tc.errorDetail != "" || tc.errorSummary != "" {
assert.Len(t, diags, 1) assert.Len(t, diags, 1)

View File

@ -16,7 +16,7 @@ func SingleNodeCluster() bundle.ReadOnlyMutator {
return &singleNodeCluster{} return &singleNodeCluster{}
} }
type singleNodeCluster struct{} type singleNodeCluster struct{ bundle.RO }
func (m *singleNodeCluster) Name() string { func (m *singleNodeCluster) Name() string {
return "validate:SingleNodeCluster" return "validate:SingleNodeCluster"
@ -98,7 +98,7 @@ func showSingleNodeClusterWarning(ctx context.Context, v dyn.Value) bool {
return false return false
} }
func (m *singleNodeCluster) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (m *singleNodeCluster) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
diags := diag.Diagnostics{} diags := diag.Diagnostics{}
patterns := []dyn.Pattern{ patterns := []dyn.Pattern{
@ -115,7 +115,7 @@ func (m *singleNodeCluster) Apply(ctx context.Context, rb bundle.ReadOnlyBundle)
} }
for _, p := range patterns { for _, p := range patterns {
_, err := dyn.MapByPattern(rb.Config().Value(), p, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { _, err := dyn.MapByPattern(b.Config.Value(), p, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
warning := diag.Diagnostic{ warning := diag.Diagnostic{
Severity: diag.Warning, Severity: diag.Warning,
Summary: singleNodeWarningSummary, Summary: singleNodeWarningSummary,

View File

@ -116,7 +116,7 @@ func TestValidateSingleNodeClusterFailForInteractiveClusters(t *testing.T) {
bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) { bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) {
return dyn.Set(v, "resources.clusters.foo.num_workers", dyn.V(0)) return dyn.Set(v, "resources.clusters.foo.num_workers", dyn.V(0))
}) })
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := SingleNodeCluster().Apply(ctx, b)
assert.Equal(t, diag.Diagnostics{ assert.Equal(t, diag.Diagnostics{
{ {
Severity: diag.Warning, Severity: diag.Warning,
@ -165,7 +165,7 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) {
return dyn.Set(v, "resources.jobs.foo.job_clusters[0].new_cluster.num_workers", dyn.V(0)) return dyn.Set(v, "resources.jobs.foo.job_clusters[0].new_cluster.num_workers", dyn.V(0))
}) })
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := SingleNodeCluster().Apply(ctx, b)
assert.Equal(t, diag.Diagnostics{ assert.Equal(t, diag.Diagnostics{
{ {
Severity: diag.Warning, Severity: diag.Warning,
@ -214,7 +214,7 @@ func TestValidateSingleNodeClusterFailForJobTaskClusters(t *testing.T) {
return dyn.Set(v, "resources.jobs.foo.tasks[0].new_cluster.num_workers", dyn.V(0)) return dyn.Set(v, "resources.jobs.foo.tasks[0].new_cluster.num_workers", dyn.V(0))
}) })
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Equal(t, diag.Diagnostics{ assert.Equal(t, diag.Diagnostics{
{ {
Severity: diag.Warning, Severity: diag.Warning,
@ -260,7 +260,7 @@ func TestValidateSingleNodeClusterFailForPipelineClusters(t *testing.T) {
return dyn.Set(v, "resources.pipelines.foo.clusters[0].num_workers", dyn.V(0)) return dyn.Set(v, "resources.pipelines.foo.clusters[0].num_workers", dyn.V(0))
}) })
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Equal(t, diag.Diagnostics{ assert.Equal(t, diag.Diagnostics{
{ {
Severity: diag.Warning, Severity: diag.Warning,
@ -313,7 +313,7 @@ func TestValidateSingleNodeClusterFailForJobForEachTaskCluster(t *testing.T) {
return dyn.Set(v, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster.num_workers", dyn.V(0)) return dyn.Set(v, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster.num_workers", dyn.V(0))
}) })
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Equal(t, diag.Diagnostics{ assert.Equal(t, diag.Diagnostics{
{ {
Severity: diag.Warning, Severity: diag.Warning,
@ -397,7 +397,7 @@ func TestValidateSingleNodeClusterPassInteractiveClusters(t *testing.T) {
}) })
} }
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Empty(t, diags) assert.Empty(t, diags)
}) })
} }
@ -437,7 +437,7 @@ func TestValidateSingleNodeClusterPassJobClusters(t *testing.T) {
}) })
} }
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Empty(t, diags) assert.Empty(t, diags)
}) })
} }
@ -477,7 +477,7 @@ func TestValidateSingleNodeClusterPassJobTaskClusters(t *testing.T) {
}) })
} }
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Empty(t, diags) assert.Empty(t, diags)
}) })
} }
@ -514,7 +514,7 @@ func TestValidateSingleNodeClusterPassPipelineClusters(t *testing.T) {
}) })
} }
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Empty(t, diags) assert.Empty(t, diags)
}) })
} }
@ -558,7 +558,7 @@ func TestValidateSingleNodeClusterPassJobForEachTaskCluster(t *testing.T) {
}) })
} }
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster()) diags := bundle.Apply(ctx, b, SingleNodeCluster())
assert.Empty(t, diags) assert.Empty(t, diags)
}) })
} }

View File

@ -5,46 +5,16 @@ import (
"github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle"
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
) )
type validate struct{} func Validate(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return bundle.ApplyParallel(ctx, b,
type location struct { FastValidate(),
path string
rb bundle.ReadOnlyBundle
}
func (l location) Location() dyn.Location {
return l.rb.Config().GetLocation(l.path)
}
func (l location) Locations() []dyn.Location {
return l.rb.Config().GetLocations(l.path)
}
func (l location) Path() dyn.Path {
return dyn.MustPathFromString(l.path)
}
// Apply implements bundle.Mutator.
func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), bundle.Parallel(
FastValidateReadonly(),
// Slow mutators that require network or file i/o. These are only // Slow mutators that require network or file i/o. These are only
// run in the `bundle validate` command. // run in the `bundle validate` command.
FilesToSync(), FilesToSync(),
ValidateFolderPermissions(), ValidateFolderPermissions(),
ValidateSyncPatterns(), ValidateSyncPatterns(),
)) )
}
// Name implements bundle.Mutator.
func (v *validate) Name() string {
return "validate"
}
func Validate() bundle.Mutator {
return &validate{}
} }

View File

@ -16,7 +16,7 @@ import (
"github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/apierr"
) )
type validateArtifactPath struct{} type validateArtifactPath struct{ bundle.RO }
func ValidateArtifactPath() bundle.ReadOnlyMutator { func ValidateArtifactPath() bundle.ReadOnlyMutator {
return &validateArtifactPath{} return &validateArtifactPath{}
@ -74,9 +74,9 @@ func findVolumeInBundle(r config.Root, catalogName, schemaName, volumeName strin
return nil, nil, false return nil, nil, false
} }
func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics { func (v *validateArtifactPath) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
// We only validate UC Volumes paths right now. // We only validate UC Volumes paths right now.
if !libraries.IsVolumesPath(rb.Config().Workspace.ArtifactPath) { if !libraries.IsVolumesPath(b.Config.Workspace.ArtifactPath) {
return nil return nil
} }
@ -85,25 +85,25 @@ func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBund
{ {
Summary: s, Summary: s,
Severity: diag.Error, Severity: diag.Error,
Locations: rb.Config().GetLocations("workspace.artifact_path"), Locations: b.Config.GetLocations("workspace.artifact_path"),
Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
}, },
} }
} }
catalogName, schemaName, volumeName, err := extractVolumeFromPath(rb.Config().Workspace.ArtifactPath) catalogName, schemaName, volumeName, err := extractVolumeFromPath(b.Config.Workspace.ArtifactPath)
if err != nil { if err != nil {
return wrapErrorMsg(err.Error()) return wrapErrorMsg(err.Error())
} }
volumeFullName := fmt.Sprintf("%s.%s.%s", catalogName, schemaName, volumeName) volumeFullName := fmt.Sprintf("%s.%s.%s", catalogName, schemaName, volumeName)
w := rb.WorkspaceClient() w := b.WorkspaceClient()
_, err = w.Volumes.ReadByName(ctx, volumeFullName) _, err = w.Volumes.ReadByName(ctx, volumeFullName)
if errors.Is(err, apierr.ErrPermissionDenied) { if errors.Is(err, apierr.ErrPermissionDenied) {
return wrapErrorMsg(fmt.Sprintf("cannot access volume %s: %s", volumeFullName, err)) return wrapErrorMsg(fmt.Sprintf("cannot access volume %s: %s", volumeFullName, err))
} }
if errors.Is(err, apierr.ErrNotFound) { if errors.Is(err, apierr.ErrNotFound) {
path, locations, ok := findVolumeInBundle(rb.Config(), catalogName, schemaName, volumeName) path, locations, ok := findVolumeInBundle(b.Config, catalogName, schemaName, volumeName)
if !ok { if !ok {
return wrapErrorMsg(fmt.Sprintf("volume %s does not exist", volumeFullName)) return wrapErrorMsg(fmt.Sprintf("volume %s does not exist", volumeFullName))
} }
@ -117,7 +117,7 @@ func (v *validateArtifactPath) Apply(ctx context.Context, rb bundle.ReadOnlyBund
this bundle but which has not been deployed yet. Please first deploy this bundle but which has not been deployed yet. Please first deploy
the volume using 'bundle deploy' and then switch over to using it in the volume using 'bundle deploy' and then switch over to using it in
the artifact_path.`, the artifact_path.`,
Locations: slices.Concat(rb.Config().GetLocations("workspace.artifact_path"), locations), Locations: slices.Concat(b.Config.GetLocations("workspace.artifact_path"), locations),
Paths: append([]dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, path), Paths: append([]dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, path),
}} }}

View File

@ -49,7 +49,7 @@ func TestValidateArtifactPathWithVolumeInBundle(t *testing.T) {
}) })
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), ValidateArtifactPath()) diags := ValidateArtifactPath().Apply(ctx, b)
assert.Equal(t, diag.Diagnostics{{ assert.Equal(t, diag.Diagnostics{{
Severity: diag.Error, Severity: diag.Error,
Summary: "volume catalogN.schemaN.volumeN does not exist", Summary: "volume catalogN.schemaN.volumeN does not exist",
@ -88,7 +88,6 @@ func TestValidateArtifactPath(t *testing.T) {
}}, diags) }}, diags)
} }
rb := bundle.ReadOnly(b)
ctx := context.Background() ctx := context.Background()
tcases := []struct { tcases := []struct {
@ -123,7 +122,7 @@ func TestValidateArtifactPath(t *testing.T) {
api.EXPECT().ReadByName(mock.Anything, "catalogN.schemaN.volumeN").Return(nil, tc.err) api.EXPECT().ReadByName(mock.Anything, "catalogN.schemaN.volumeN").Return(nil, tc.err)
b.SetWorkpaceClient(m.WorkspaceClient) b.SetWorkpaceClient(m.WorkspaceClient)
diags := bundle.ApplyReadOnly(ctx, rb, ValidateArtifactPath()) diags := ValidateArtifactPath().Apply(ctx, b)
assertDiags(t, diags, tc.expectedSummary) assertDiags(t, diags, tc.expectedSummary)
} }
} }
@ -167,7 +166,7 @@ func TestValidateArtifactPathWithInvalidPaths(t *testing.T) {
bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}) bundletest.SetLocation(b, "workspace.artifact_path", []dyn.Location{{File: "config.yml", Line: 1, Column: 2}})
diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), ValidateArtifactPath()) diags := ValidateArtifactPath().Apply(context.Background(), b)
require.Equal(t, diag.Diagnostics{{ require.Equal(t, diag.Diagnostics{{
Severity: diag.Error, Severity: diag.Error,
Summary: "expected UC volume path to be in the format /Volumes/<catalog>/<schema>/<volume>/..., got " + p, Summary: "expected UC volume path to be in the format /Volumes/<catalog>/<schema>/<volume>/..., got " + p,

Some files were not shown because too many files have changed in this diff Show More