mirror of https://github.com/databricks/cli.git
Merge remote-tracking branch 'origin' into async-logger-clean
commit fbc657d48c

@@ -1 +1 @@
99f644e72261ef5ecf8d74db20f4b7a1e09723cc
e5c870006a536121442cfd2441bdc8a5fb76ae1e
@@ -1,6 +1,10 @@
## Changes
<!-- Summary of your changes that are easy to understand -->
<!-- Brief summary of your changes that is easy to understand -->

## Why
<!-- Why are these changes needed? Provide the context that the reviewer might be missing.
For example, were there any decisions behind the change that are not reflected in the code itself? -->

## Tests
<!-- How is this tested? -->
<!-- How have you tested the changes? -->
@@ -53,7 +53,7 @@ jobs:
go-version-file: go.mod

- name: Setup Python
uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0
uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 # v5.4.0
with:
python-version: '3.9'
@@ -95,7 +95,7 @@ jobs:
# Exit with status code 1 if there are differences (i.e. unformatted files)
git diff --exit-code
- name: golangci-lint
uses: golangci/golangci-lint-action@ec5d18412c0aeab7936cb16880d708ba2a64e1ae # v6.2.0
uses: golangci/golangci-lint-action@2226d7cb06a077cd73e56eedd38eecad18e5d837 # v6.5.0
with:
version: v1.63.4
args: --timeout=15m
@@ -145,7 +145,10 @@ jobs:
go run main.go bundle schema > schema.json

# Add markdownDescription keyword to ajv
echo "module.exports=function(a){a.addKeyword('markdownDescription')}" >> keywords.js
echo "module.exports = function(a) {
a.addKeyword('markdownDescription');
a.addKeyword('deprecationMessage');
}" >> keywords.js

for file in ./bundle/internal/schema/testdata/pass/*.yml; do
ajv test -s schema.json -d $file --valid -c=./keywords.js
@@ -54,21 +54,21 @@ jobs:
args: release --snapshot --skip docker

- name: Upload macOS binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_darwin_snapshot
path: |
dist/*_darwin_*/

- name: Upload Linux binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_linux_snapshot
path: |
dist/*_linux_*/

- name: Upload Windows binaries
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4.6.1
with:
name: cli_windows_snapshot
path: |
@@ -46,7 +46,7 @@ jobs:
# QEMU is required to build cross platform docker images using buildx.
# It allows virtualization of the CPU architecture at the application level.
- name: Set up QEMU dependency
uses: docker/setup-qemu-action@53851d14592bedcffcf25ea515637cff71ef929a # v3.3.0
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0

- name: Run GoReleaser
id: releaser
@@ -17,5 +17,8 @@
"python.envFile": "${workspaceRoot}/.env",
"python.analysis.stubPath": ".vscode",
"jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\<codecell\\>|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])",
"jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------"
"jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------",
"files.associations": {
"script": "shellscript"
}
}
Makefile
@@ -20,7 +20,7 @@ lintcheck:
# formatting/goimports will not be applied by 'make lint'. However, it will be applied by 'make fmt'.
# If you need to ensure that formatting & imports are always fixed, do "make fmt lint"
fmt:
ruff format -q
ruff format -qn
golangci-lint run --enable-only="gofmt,gofumpt,goimports" --fix ./...

test:
@@ -390,6 +390,9 @@ func runTest(t *testing.T, dir, coverDir string, repls testdiff.ReplacementsCont
if _, ok := Ignored[relPath]; ok {
continue
}
if config.CompiledIgnoreObject.MatchesPath(relPath) {
continue
}
unexpected = append(unexpected, relPath)
if strings.HasPrefix(relPath, "out") {
// We have a new file starting with "out"
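The check added above consults a gitignore-style matcher that is compiled from the Ignore list declared in test.toml files and wired up in LoadConfig later in this diff. A minimal, illustrative sketch (not part of the diff) of how those two pieces fit together, assuming the github.com/sabhiram/go-gitignore API imported as "ignore" above:

package main

import (
	"fmt"

	ignore "github.com/sabhiram/go-gitignore"
)

func main() {
	// Patterns as they might appear in a test.toml Ignore list.
	obj := ignore.CompileIgnoreLines(".venv", "dist", "*egg-info", ".databricks")

	// Output files matching any pattern are skipped by runTest instead of
	// being reported as unexpected files.
	fmt.Println(obj.MatchesPath("dist/my_test_code-0.0.1-py3-none-any.whl")) // true
	fmt.Println(obj.MatchesPath("out.requests.txt"))                         // false
}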
@@ -0,0 +1,36 @@
#!/usr/bin/env python3
"""
Usage: find.py <regex>
Finds all files within current directory matching regex. The output is sorted and slashes are always forward.

If --expect N is provided, the number of matches must be N or error is printed.
"""

import sys
import os
import re
import argparse


parser = argparse.ArgumentParser()
parser.add_argument("regex")
parser.add_argument("--expect", type=int)
args = parser.parse_args()

regex = re.compile(args.regex)
result = []

for root, dirs, files in os.walk("."):
    for filename in files:
        path = os.path.join(root, filename).lstrip("./\\").replace("\\", "/")
        if regex.search(path):
            result.append(path)

result.sort()
for item in result:
    print(item)
sys.stdout.flush()

if args.expect is not None:
    if args.expect != len(result):
        sys.exit(f"Expected {args.expect}, got {len(result)}")
@@ -0,0 +1,3 @@
command:
- python
- app.py
@@ -0,0 +1,8 @@
bundle:
  name: apps_yaml

resources:
  apps:
    myapp:
      name: myapp
      source_code_path: ./app
@@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}
@@ -0,0 +1,15 @@

>>> [CLI] bundle validate
Name: apps_yaml
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default

Validation OK!

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
@@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_yaml/default/files/app/app.yml")' out.requests.txt | sed 's/\\r//g' > out.app.yml.txt
rm out.requests.txt
@@ -0,0 +1 @@
print("Hello world!")
@@ -0,0 +1,12 @@
bundle:
  name: apps_config_section

resources:
  apps:
    myapp:
      name: myapp
      source_code_path: ./app
      config:
        command:
          - python
          - app.py
@@ -0,0 +1,5 @@
{
"method": "POST",
"path": "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml",
"raw_body": "command:\n - python\n - app.py\n"
}
@@ -0,0 +1,23 @@

>>> [CLI] bundle validate
Warning: App config section detected

remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead

Name: apps_config_section
Target: default
Workspace:
User: [USERNAME]
Path: /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default

Found 1 warning

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
Warning: App config section detected

remove 'config' from app resource 'myapp' section and use app.yml file in the root of this app instead
@@ -0,0 +1,4 @@
trace $CLI bundle validate
trace $CLI bundle deploy
jq 'select(.path == "/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/apps_config_section/default/files/app/app.yml")' out.requests.txt > out.app.yml.txt
rm out.requests.txt
@@ -0,0 +1,26 @@
Cloud = false
RecordRequests = true

Ignore = [
'.databricks',
]

[[Server]]
Pattern = "POST /api/2.0/apps"

[[Server]]
Pattern = "GET /api/2.0/apps/myapp"
Response.Body = '''
{
"name": "myapp",
"description": "",
"compute_status": {
"state": "ACTIVE",
"message": "App compute is active."
},
"app_status": {
"state": "RUNNING",
"message": "Application is running."
}
}
'''
@@ -1,3 +1,5 @@
RecordRequests = false

[[Repls]]
Old = '\\'
New = '/'
@@ -0,0 +1,9 @@
export PYTHONDONTWRITEBYTECODE=1

uv venv -q --python 3.12 .venv
if [[ "$OSTYPE" == "msys" || "$OSTYPE" == "cygwin" || "$OSTYPE" == "win32" ]]; then
source .venv/Scripts/activate
else
source .venv/bin/activate
fi
uv pip install -q setuptools
@@ -0,0 +1,18 @@
Cloud = false
RecordRequests = true
Ignore = [
'.venv',
'dist',
'build',
'*egg-info',
'.databricks',
]

[[Server]]
Pattern = "GET /api/2.1/clusters/get"
Response.Body = '''
{
"cluster_id": "0717-132531-5opeqon1",
"spark_version": "13.3.x-scala2.12"
}
'''
@@ -2,8 +2,8 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Uploading [package name]
Uploading [package name]
Uploading [package name]...
Uploading [package name]...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
Deploying resources...
Updating deployment state...
@@ -1,6 +1,6 @@
Cloud = false
RecordRequests = false

# The order in which files are uploaded can be different, so we just replace the name
[[Repls]]
Old="Uploading .*-0.0.1-py3-none-any.whl..."
Old="Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl"
New="Uploading [package name]"
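An illustrative sketch (not part of the diff) of what the updated [[Repls]] entry does to a captured output line. Plain Go regexp is used here only for illustration; the acceptance framework applies these replacements through testdiff.ReplacementsContext. Because the new Old pattern no longer consumes the trailing "...", the ellipsis survives the substitution, which is why the expected output above changed from "Uploading [package name]" to "Uploading [package name]...":

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Updated Old pattern from test.toml; the trailing "..." is intentionally left unmatched.
	re := regexp.MustCompile(`Uploading (my_package|my_other_package)-0.0.1-py3-none-any.whl`)

	line := "Uploading my_package-0.0.1-py3-none-any.whl..."
	fmt.Println(re.ReplaceAllString(line, "Uploading [package name]"))
	// Prints: Uploading [package name]...
}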
@@ -0,0 +1,32 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

=== Expecting to find no wheels
>>> errcode find.py --expect 0 whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "dbfs:/path/to/dist/mywheel.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]

=== Expecting no wheels to be uploaded
>>> errcode sh -c jq .path < out.requests.txt | grep import | grep whl

Exit code: 1
@@ -0,0 +1,12 @@
trace $CLI bundle deploy

title "Expecting to find no wheels"
trace errcode find.py --expect 0 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting no wheels to be uploaded"
trace errcode sh -c 'jq .path < out.requests.txt | grep import | grep whl'

rm out.requests.txt
@@ -5,7 +5,8 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
# using 'python' there because 'python3' does not exist in virtualenv on windows
build: python setup.py bdist_wheel

resources:
jobs:
@@ -0,0 +1,34 @@

>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -0,0 +1,34 @@

>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]

=== Expecting 1 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheels in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheels to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -0,0 +1,46 @@

>>> [CLI] bundle deploy
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
package/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "[default] My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-local/default/files/package/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -0,0 +1,33 @@

>>> [CLI] bundle deploy
Building python_artifact...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-aaaaa-bbbbbb",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
}
],
"notebook_task": {
"notebook_path": "/notebook.py"
},
"task_key": "TestTask"
}
]

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel-notebook/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -5,11 +5,11 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
build: "python setup.py bdist_wheel"
my_test_code_2:
type: whl
path: "./my_test_code"
build: "python3 setup2.py bdist_wheel"
build: "python setup2.py bdist_wheel"

resources:
jobs:
@@ -0,0 +1,42 @@

>>> [CLI] bundle deploy
Building my_test_code...
Building my_test_code_2...
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading my_test_code_2-0.0.1-py3-none-any.whl...

>>> find.py --expect 2 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl
my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl

=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]

=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code_2-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/my_test_code/dist/my_test_code_2-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' messages change order

trace find.py --expect 2 whl

title "Expecting 2 wheels in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 2 wheels to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm -fr out.requests.txt
@@ -13,4 +13,4 @@ resources:
entry_point: "run"
libraries:
- whl: ./dist/*.whl
- whl: ./dist/lib/my_test_code-0.0.1-py3-none-any.whl
- whl: ./dist/lib/other_test_code-0.0.1-py3-none-any.whl
@@ -0,0 +1,45 @@

>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl

>>> [CLI] bundle deploy
Deploying resources...
Deployment complete!
Updating deployment state...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading other_test_code-0.0.1-py3-none-any.whl...

=== Expecting to find 2 wheels, same as initially provided
>>> find.py --expect 2 whl
dist/lib/other_test_code-0.0.1-py3-none-any.whl
dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 2 wheels in libraries section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body.tasks out.requests.txt
[
{
"existing_cluster_id": "0717-132531-5opeqon1",
"libraries": [
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
},
{
"whl": "/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
}
],
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]

=== Expecting 2 wheels to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/artifacts/.internal/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/lib/other_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/python-wheel/default/files/dist/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,14 @@
trace find.py --expect 2 whl

trace $CLI bundle deploy 2>&1 | sort # sorting because 'Uploading ...whl...' messages change order

title "Expecting to find 2 wheels, same as initially provided"
trace find.py --expect 2 whl

title "Expecting 2 wheels in libraries section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body.tasks' out.requests.txt

title "Expecting 2 wheels to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -5,7 +5,7 @@ artifacts:
my_test_code:
type: whl
path: "./my_test_code"
build: "python3 setup.py bdist_wheel"
build: python setup.py bdist_wheel

resources:
jobs:
@@ -0,0 +1,54 @@

>>> [CLI] bundle deploy
Building my_test_code...
Uploading my_test_code-0.0.1-py3-none-any.whl...
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/environment_key/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> find.py --expect 1 whl
my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl

=== Expecting 1 wheel in environments section in /jobs/create
>>> jq -s .[] | select(.path=="/api/2.1/jobs/create") | .body out.requests.txt
{
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/environment_key/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"environments": [
{
"environment_key": "test_env",
"spec": {
"client": "1",
"dependencies": [
"/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
]
}
}
],
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "My Wheel Job",
"queue": {
"enabled": true
},
"tasks": [
{
"environment_key": "test_env",
"existing_cluster_id": "0717-132531-5opeqon1",
"python_wheel_task": {
"entry_point": "run",
"package_name": "my_test_code"
},
"task_key": "TestTask"
}
]
}

=== Expecting 1 wheel to be uploaded
>>> jq .path
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/artifacts/.internal/my_test_code-0.0.1-py3-none-any.whl"
"/api/2.0/workspace-files/import-file/Workspace/Users/[USERNAME]/.bundle/environment_key/default/files/my_test_code/dist/my_test_code-0.0.1-py3-none-any.whl"
@@ -0,0 +1,11 @@
trace $CLI bundle deploy

trace find.py --expect 1 whl

title "Expecting 1 wheel in environments section in /jobs/create"
trace jq -s '.[] | select(.path=="/api/2.1/jobs/create") | .body' out.requests.txt

title "Expecting 1 wheel to be uploaded"
trace jq .path < out.requests.txt | grep import | grep whl | sort

rm out.requests.txt
@@ -1,15 +0,0 @@
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly)
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:SingleNodeCluster
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:artifact_paths
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_cluster_key_defined
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=fast_validate(readonly) mutator (read-only)=parallel mutator (read-only)=validate:job_task_cluster_spec
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:folder_permissions
10:07:59 Debug: ApplyReadOnly pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:validate_sync_patterns
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< HTTP/0.0 000 OK pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
< } pid=12345 mutator=validate mutator (read-only)=parallel mutator (read-only)=validate:files_to_sync sdk=true
@@ -72,18 +72,30 @@
10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly)
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:files_to_sync
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:folder_permissions
10:07:59 Debug: ApplyParallel pid=12345 mutator=validate:validate_sync_patterns
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_cluster_key_defined
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:job_task_cluster_spec
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:SingleNodeCluster
10:07:59 Debug: ApplyParallel pid=12345 mutator=fast_validate(readonly) mutator=validate:artifact_paths
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found
< {
< "message": "Workspace path not found"
< } pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: non-retriable error: Workspace path not found pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: POST /api/2.0/workspace/mkdirs
> {
> "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
> }
< HTTP/1.1 200 OK pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 200 OK
< {
< "object_type": "DIRECTORY",
< "path": "/Workspace/Users/[USERNAME]/.bundle/debug/default/files"
< } pid=12345 mutator=validate:files_to_sync sdk=true
10:07:59 Debug: Path /Workspace/Users/[USERNAME]/.bundle/debug/default/files has type directory (ID: 0) pid=12345 mutator=validate:files_to_sync
10:07:59 Info: completed execution pid=12345 exit_code=0
@@ -1,4 +1 @@
$CLI bundle validate --debug 2> full.stderr.txt
grep -vw parallel full.stderr.txt > out.stderr.txt
grep -w parallel full.stderr.txt | sed 's/[0-9]/0/g' | sort_lines.py > out.stderr.parallel.txt
rm full.stderr.txt
$CLI bundle validate --debug 2> out.stderr.txt
@@ -2,10 +2,8 @@ Warning: Include section is defined outside root file
at include
in a.yml:2:3

The include section is defined in a file that is not the root file.
These values will be ignored because only the includes defined in
the bundle root file (that is databricks.yml or databricks.yaml)
are loaded.
An include section is defined in a file that is not databricks.yml.
Only includes defined in databricks.yml are applied.

Name: include_outside_root
Target: default
@@ -0,0 +1,21 @@
bundle:
  name: state

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: "test-task"
          spark_python_task:
            python_file: ./test.py
          new_cluster:
            spark_version: 15.4.x-scala2.12
            node_type_id: i3.xlarge
            data_security_mode: SINGLE_USER
            num_workers: 0
            spark_conf:
              spark.master: "local[*, 4]"
              spark.databricks.cluster.profile: singleNode
            custom_tags:
              ResourceClass: SingleNode
@@ -0,0 +1,4 @@
{
"method": "GET",
"path": "/api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/state/default/state/terraform.tfstate"
}
@@ -0,0 +1,12 @@

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/state/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> [CLI] bundle deploy
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/state/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!
@@ -0,0 +1,4 @@
trace $CLI bundle deploy
trace $CLI bundle deploy # We do 2 deploys because only 2nd deploy will pull state from remote after 1st created it
jq 'select(.path == "/api/2.0/workspace-files/Workspace/Users/[USERNAME]/.bundle/state/default/state/terraform.tfstate")' out.requests.txt > out.state.txt
rm out.requests.txt
@@ -0,0 +1 @@
print("Hello world!")
@@ -0,0 +1,2 @@
Cloud = false
RecordRequests = true
@@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod

# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore
@@ -6,6 +6,7 @@ trace $CLI bundle validate -t prod

# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore

cd ../../
@@ -6,3 +6,4 @@ trace $CLI bundle validate -t prod

# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore
@@ -0,0 +1 @@
*
@@ -6,3 +6,6 @@ trace $CLI bundle validate -t prod

# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore

# Only for this test (default-sql), record .databricks/.gitignore in the output
mv .databricks/.gitignore .databricks/out.gitignore
@@ -11,3 +11,4 @@ rm -fr .venv resources/__pycache__ uv.lock my_jobs_as_code.egg-info

# Do not affect this repository's git behaviour #2318
mv .gitignore out.gitignore
rm .databricks/.gitignore
@@ -35,6 +35,7 @@ Usage:
databricks apps update NAME [flags]

Flags:
--budget-policy-id string
--description string The description of the app.
-h, --help help for update
--json JSON either inline JSON string or @path/to/file.json with request body (default JSON (0 bytes))
@@ -11,6 +11,7 @@ import (
"github.com/BurntSushi/toml"
"github.com/databricks/cli/libs/testdiff"
"github.com/databricks/cli/libs/testserver"
ignore "github.com/sabhiram/go-gitignore"
"github.com/stretchr/testify/require"
)
@@ -51,6 +52,11 @@ type TestConfig struct {

// List of request headers to include when recording requests.
IncludeRequestHeaders []string

// List of gitignore patterns to ignore when checking output files
Ignore []string

CompiledIgnoreObject *ignore.GitIgnore
}

type ServerStub struct {
@@ -111,6 +117,8 @@ func LoadConfig(t *testing.T, dir string) (TestConfig, string) {
}
}

result.CompiledIgnoreObject = ignore.CompileIgnoreLines(result.Ignore...)

return result, strings.Join(configs, ", ")
}
@@ -27,3 +27,7 @@ echo 123456

printf "\n=== Testing --version"
trace $CLI --version

touch ignored_file.txt
mkdir ignored_dir
touch ignored_dir/hello.txt
@@ -1,5 +1,8 @@
# Badness = "Brief description of what's wrong with the test output, if anything"

Ignore = ['ignore*']


#[GOOS]
# Disable on Windows
#windows = false
@@ -111,6 +111,11 @@ func AddHandlers(server *testserver.Server) {
return ""
})

server.Handle("GET", "/api/2.0/workspace-files/{path:.*}", func(req testserver.Request) any {
path := req.Vars["path"]
return req.Workspace.WorkspaceFilesExportFile(path)
})

server.Handle("GET", "/api/2.1/unity-catalog/current-metastore-assignment", func(req testserver.Request) any {
return testMetastore
})
@@ -2,7 +2,7 @@ terraform {
required_providers {
databricks = {
source = "databricks/databricks"
version = "1.65.1"
version = "1.68.0"
}
}
@@ -4,9 +4,9 @@
Initializing the backend...

Initializing provider plugins...
- Finding databricks/databricks versions matching "1.65.1"...
- Installing databricks/databricks v1.65.1...
- Installed databricks/databricks v1.65.1 (unauthenticated)
- Finding databricks/databricks versions matching "1.68.0"...
- Installing databricks/databricks v1.68.0...
- Installed databricks/databricks v1.68.0 (unauthenticated)

Terraform has created a lock file .terraform.lock.hcl to record the provider
selections it made above. Include this file in your version control repository
Some files were not shown because too many files have changed in this diff.