Shreyas Goenka 2025-02-28 12:55:03 +01:00
commit c898aafbed
94 changed files with 1328 additions and 925 deletions


@ -1,5 +1,29 @@
# Version changelog
## [Release] Release v0.242.0

Notable changes:

Starting with this version, the CLI no longer loads bundle auth information when a CLI command is executed inside a bundle directory with a profile explicitly provided via the `-p` flag.
For more details, see the related GitHub issue: https://github.com/databricks/cli/issues/1358

CLI:
* Do not load host from bundle for CLI commands when profile flag is used ([#2335](https://github.com/databricks/cli/pull/2335)).
* Fixed accessing required path parameters in CLI generation when the `--json` flag is used ([#2373](https://github.com/databricks/cli/pull/2373)).

Bundles:
* Provide instructions for testing in the default-python template ([#2355](https://github.com/databricks/cli/pull/2355)).
* Remove `run_as` from the built-in templates ([#2044](https://github.com/databricks/cli/pull/2044)).
* Change warning about incomplete permissions section into a recommendation ([#2043](https://github.com/databricks/cli/pull/2043)).
* Refine `mode: production` diagnostic output ([#2236](https://github.com/databricks/cli/pull/2236)).
* Support serverless mode in default-python template (explicit prompt) ([#2377](https://github.com/databricks/cli/pull/2377)).
* Set default data_security_mode to "SINGLE_USER" in bundle templates ([#2372](https://github.com/databricks/cli/pull/2372)).
* Fixed spark version check for clusters defined in the same bundle ([#2374](https://github.com/databricks/cli/pull/2374)).

API Changes:
* Added `databricks genie get-message-query-result-by-attachment` command.

OpenAPI commit 99f644e72261ef5ecf8d74db20f4b7a1e09723cc (2025-02-11)
## [Release] Release v0.241.2

This is a bugfix release to address an issue where jobs with tasks with a


@ -0,0 +1,54 @@
bundle:
  name: same_name_libraries

variables:
  cluster:
    default:
      spark_version: 15.4.x-scala2.12
      node_type_id: i3.xlarge
      data_security_mode: SINGLE_USER
      num_workers: 0
      spark_conf:
        spark.master: "local[*, 4]"
        spark.databricks.cluster.profile: singleNode
      custom_tags:
        ResourceClass: SingleNode

artifacts:
  whl1:
    type: whl
    path: ./whl1
  whl2:
    type: whl
    path: ./whl2

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: task1
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl
            - pypi:
                package: test_package
        - task_key: task2
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl2/dist/*.whl
            - maven:
                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
        - task_key: task3
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl


@ -0,0 +1,14 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...

Error: Duplicate local library names: my_default_python-0.0.1-py3-none-any.whl
  at resources.jobs.test.tasks[0].libraries[0].whl
     resources.jobs.test.tasks[1].libraries[0].whl
  in databricks.yml:36:15
     databricks.yml:45:15

Local library names must be unique but found libraries with the same name: whl1/dist/my_default_python-0.0.1-py3-none-any.whl, whl2/dist/my_default_python-0.0.1-py3-none-any.whl

Exit code: 1


@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2


@ -0,0 +1,3 @@
[[Repls]]
Old = '\\'
New = '/'


@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_default_python

setup(
    name="my_default_python",
    version=my_default_python.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_default_python/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_default_python.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,56 @@
bundle:
  name: unique_name_libraries

variables:
  cluster:
    default:
      spark_version: 15.4.x-scala2.12
      node_type_id: i3.xlarge
      data_security_mode: SINGLE_USER
      num_workers: 0
      spark_conf:
        spark.master: "local[*, 4]"
        spark.databricks.cluster.profile: singleNode
      custom_tags:
        ResourceClass: SingleNode

artifacts:
  whl1:
    type: whl
    path: ./whl1
  whl2:
    type: whl
    path: ./whl2

resources:
  jobs:
    test:
      name: "test"
      tasks:
        - task_key: task1
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_package
          libraries:
            - whl: ./whl1/dist/*.whl
            - whl: cowsay
            - pypi:
                package: test_package
        - task_key: task2
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_other_package
          libraries:
            - whl: ./whl2/dist/*.whl
            - whl: cowsay
            - maven:
                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
        - task_key: task3
          new_cluster: ${var.cluster}
          python_wheel_task:
            entry_point: main
            package_name: my_default_python
          libraries:
            - whl: ./whl1/dist/*.whl


@ -0,0 +1,10 @@
>>> errcode [CLI] bundle deploy
Building whl1...
Building whl2...
Uploading [package name]
Uploading [package name]
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!


@ -0,0 +1,2 @@
trace errcode $CLI bundle deploy
rm -rf whl1 whl2


@ -0,0 +1,6 @@
Cloud = false
# The order in which files are uploaded can be different, so we just replace the name
[[Repls]]
Old="Uploading .*-0.0.1-py3-none-any.whl..."
New="Uploading [package name]"
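The Repls entries are regular-expression rewrites applied to the captured output before comparison; since the two wheels can finish uploading in either order, both "Uploading ..." lines are collapsed into the same placeholder. A minimal sketch of that normalization in Go, using the same Old/New pair as the test.toml above (the acceptance harness's actual implementation may differ):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Output as it might be captured from `bundle deploy` (order can vary).
	out := "Uploading my_package-0.0.1-py3-none-any.whl...\n" +
		"Uploading my_other_package-0.0.1-py3-none-any.whl...\n"

	re := regexp.MustCompile(`Uploading .*-0.0.1-py3-none-any.whl...`)
	fmt.Print(re.ReplaceAllString(out, "Uploading [package name]"))
	// Both lines now read "Uploading [package name]", so the expected
	// output no longer depends on upload order.
}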


@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_package

setup(
    name="my_package",
    version=my_package.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_package/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_package.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -0,0 +1 @@
print("hello")


@ -0,0 +1,28 @@
from setuptools import setup, find_packages

import sys

sys.path.append("./src")

import my_other_package

setup(
    name="my_other_package",
    version=my_other_package.__version__,
    url="https://databricks.com",
    author="[USERNAME]",
    description="wheel file based on my_other_package/src",
    packages=find_packages(where="./src"),
    package_dir={"": "src"},
    entry_points={
        "packages": [
            "main=my_other_package.main:main",
        ],
    },
    install_requires=[
        # Dependencies in case the output wheel file is used as a library dependency.
        # For defining dependencies, when this package is used in Databricks, see:
        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
        "setuptools"
    ],
)


@ -0,0 +1 @@
__version__ = "0.0.1"


@ -1,81 +1,77 @@
10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
-10:07:59 Debug: Apply pid=12345 mutator=load
-10:07:59 Info: Phase: load pid=12345 mutator=load
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
-10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
+10:07:59 Info: Phase: load pid=12345
+10:07:59 Debug: Apply pid=12345 mutator=EntryPoint
+10:07:59 Debug: Apply pid=12345 mutator=scripts.preinit
+10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=scripts.preinit
+10:07:59 Debug: Apply pid=12345 mutator=ProcessRootIncludes
+10:07:59 Debug: Apply pid=12345 mutator=VerifyCliVersion
+10:07:59 Debug: Apply pid=12345 mutator=EnvironmentsToTargets
+10:07:59 Debug: Apply pid=12345 mutator=ComputeIdToClusterId
+10:07:59 Debug: Apply pid=12345 mutator=InitializeVariables
+10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultTarget(default)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load)
+10:07:59 Debug: Apply pid=12345 mutator=validate:unique_resource_keys
+10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget
+10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget mutator=SelectTarget(default)
10:07:59 Debug: Apply pid=12345 mutator=<func>
-10:07:59 Debug: Apply pid=12345 mutator=initialize
-10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
+10:07:59 Info: Phase: initialize pid=12345
+10:07:59 Debug: Apply pid=12345 mutator=validate:AllResourcesHaveValues
+10:07:59 Debug: Apply pid=12345 mutator=validate:interpolation_in_auth_config
+10:07:59 Debug: Apply pid=12345 mutator=RewriteSyncPaths
+10:07:59 Debug: Apply pid=12345 mutator=SyncDefaultPath
+10:07:59 Debug: Apply pid=12345 mutator=SyncInferRoot
+10:07:59 Debug: Apply pid=12345 mutator=PopulateCurrentUser
10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
< HTTP/1.1 200 OK
< {
< "id": "[USERID]",
< "userName": "[USERNAME]"
-< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
-10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
+< } pid=12345 mutator=PopulateCurrentUser sdk=true
+10:07:59 Debug: Apply pid=12345 mutator=LoadGitDetails
+10:07:59 Debug: Apply pid=12345 mutator=ApplySourceLinkedDeploymentPreset
+10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultWorkspaceRoot
+10:07:59 Debug: Apply pid=12345 mutator=ExpandWorkspaceRoot
+10:07:59 Debug: Apply pid=12345 mutator=DefaultWorkspacePaths
+10:07:59 Debug: Apply pid=12345 mutator=PrependWorkspacePrefix
+10:07:59 Debug: Apply pid=12345 mutator=RewriteWorkspacePrefix
+10:07:59 Debug: Apply pid=12345 mutator=SetVariables
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(init)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators)
+10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
+10:07:59 Debug: Apply pid=12345 mutator=ResolveResourceReferences
+10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobClusters
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobParameters
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobTasks
+10:07:59 Debug: Apply pid=12345 mutator=MergePipelineClusters
+10:07:59 Debug: Apply pid=12345 mutator=MergeApps
+10:07:59 Debug: Apply pid=12345 mutator=CaptureSchemaDependency
+10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions
+10:07:59 Debug: Apply pid=12345 mutator=SetRunAs
+10:07:59 Debug: Apply pid=12345 mutator=OverrideCompute
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureDashboardDefaults
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureVolumeDefaults
+10:07:59 Debug: Apply pid=12345 mutator=ProcessTargetMode
+10:07:59 Debug: Apply pid=12345 mutator=ApplyPresets
+10:07:59 Debug: Apply pid=12345 mutator=DefaultQueueing
+10:07:59 Debug: Apply pid=12345 mutator=ExpandPipelineGlobPaths
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureWSFS
+10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths
+10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning
+10:07:59 Debug: Apply pid=12345 mutator=apps.Validate
+10:07:59 Debug: Apply pid=12345 mutator=ValidateSharedRootPermissions
+10:07:59 Debug: Apply pid=12345 mutator=ApplyBundlePermissions
+10:07:59 Debug: Apply pid=12345 mutator=FilterCurrentUserFromPermissions
+10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs
+10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines
+10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
+10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
10:07:59 Debug: Apply pid=12345 mutator=validate
10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
< HTTP/1.1 404 Not Found


@ -0,0 +1,4 @@
include:
  - b.yml
  - c.yml


@ -0,0 +1,5 @@
bundle:
  name: include_outside_root

include:
  - a.yml


@ -0,0 +1,16 @@
Warning: Include section is defined outside root file
  at include
  in a.yml:2:3

The include section is defined in a file that is not the root file.
These values will be ignored because only the includes defined in
the bundle root file (that is databricks.yml or databricks.yaml)
are loaded.

Name: include_outside_root
Target: default
Workspace:
  User: [USERNAME]
  Path: /Workspace/Users/[USERNAME]/.bundle/include_outside_root/default

Found 1 warning


@ -0,0 +1 @@
$CLI bundle validate


@ -1,5 +1,12 @@
>>> errcode [CLI] bundle validate -o json

+Warning: Variable interpolation is not supported for fields that configure authentication
+  at workspace.host
+  in databricks.yml:10:9
+
+Interpolation is not supported for the field workspace.host. Please set
+the DATABRICKS_HOST environment variable if you wish to configure this field at runtime.
+
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

{
@ -27,6 +34,13 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara
Exit code: 1

>>> errcode [CLI] bundle validate

+Warning: Variable interpolation is not supported for fields that configure authentication
+  at workspace.host
+  in databricks.yml:10:9
+
+Interpolation is not supported for the field workspace.host. Please set
+the DATABRICKS_HOST environment variable if you wish to configure this field at runtime.
+
Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name

Name: host
@ -34,6 +48,6 @@ Target: default
Workspace:
  Host: ${var.host}

-Found 1 error
+Found 1 error and 1 warning

Exit code: 1


@ -0,0 +1,15 @@
>>> [CLI] selftest panic
The Databricks CLI unexpectedly had a fatal error.
Please report this issue to Databricks in the form of a GitHub issue at:
https://github.com/databricks/cli
CLI Version: [DEV_VERSION]
Panic Payload: the databricks selftest panic command always panics
Stack Trace:
goroutine 1 [running]:
runtime/debug.Stack()
Exit code: 1

acceptance/panic/script Normal file

@ -0,0 +1,5 @@
# We filter anything after runtime/debug.Stack() in the output because the stack
# trace itself is hard to perform replacements on, since it can depend upon the
# exact setup of where the modules are installed in your Go setup, memory addresses
# at runtime etc.
trace $CLI selftest panic 2>&1 | sed '/runtime\/debug\.Stack()/q'


@ -70,6 +70,6 @@ env:
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u)) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), &u)
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
} }


@ -51,7 +51,7 @@ func TestAppsValidate(t *testing.T) {
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate())) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "app.yml detected", diags[0].Summary) require.Equal(t, "app.yml detected", diags[0].Summary)
require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource") require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
@ -90,7 +90,7 @@ func TestAppsValidateSameSourcePath(t *testing.T) {
bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate())) diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
require.Len(t, diags, 1) require.Len(t, diags, 1)
require.Equal(t, "Duplicate app source code path", diags[0].Summary) require.Equal(t, "Duplicate app source code path", diags[0].Summary)
require.Contains(t, diags[0].Detail, "has the same source code path as app resource") require.Contains(t, diags[0].Detail, "has the same source code path as app resource")


@ -38,5 +38,5 @@ func (m *all) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
		}
	}

-	return bundle.Apply(ctx, b, bundle.Seq(out...))
+	return bundle.ApplySeq(ctx, b, out...)
}


@ -53,5 +53,5 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	// if we do it before, any files that are generated by build command will
	// not be included into artifact.Files and thus will not be uploaded.
	mutators = append(mutators, &expandGlobs{name: m.name})

-	return bundle.Apply(ctx, b, bundle.Seq(mutators...))
+	return bundle.ApplySeq(ctx, b, mutators...)
}


@ -39,11 +39,11 @@ func TestExpandGlobs_Nominal(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
require.NoError(t, diags.Error()) require.NoError(t, diags.Error())
// Assert that the expanded paths are correct. // Assert that the expanded paths are correct.
@ -80,11 +80,11 @@ func TestExpandGlobs_InvalidPattern(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
assert.Len(t, diags, 4) assert.Len(t, diags, 4)
assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary) assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary)
@ -128,11 +128,11 @@ func TestExpandGlobs_NoMatches(t *testing.T) {
bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}}) bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
ctx := context.Background() ctx := context.Background()
diags := bundle.Apply(ctx, b, bundle.Seq( diags := bundle.ApplySeq(ctx, b,
// Run prepare first to make paths absolute. // Run prepare first to make paths absolute.
&prepare{"test"}, &prepare{"test"},
&expandGlobs{"test"}, &expandGlobs{"test"},
)) )
assert.Len(t, diags, 2) assert.Len(t, diags, 2)
assert.Equal(t, "c*.txt: no matching files", diags[0].Summary) assert.Equal(t, "c*.txt: no matching files", diags[0].Summary)


@ -161,6 +161,19 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
		return diags
	}

+	if len(this.Include) > 0 {
+		diags = diags.Append(diag.Diagnostic{
+			Severity: diag.Warning,
+			Summary:  "Include section is defined outside root file",
+			Detail: `The include section is defined in a file that is not the root file.
+These values will be ignored because only the includes defined in
+the bundle root file (that is databricks.yml or databricks.yaml)
+are loaded.`,
+			Locations: this.GetLocations("include"),
+			Paths:     []dyn.Path{dyn.MustPathFromString("include")},
+		})
+	}
+
	err := b.Config.Merge(this)
	if err != nil {
		diags = diags.Extend(diag.FromErr(err))


@ -98,5 +98,5 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
	// Swap out the original includes list with the expanded globs.
	b.Config.Include = files

-	return bundle.Apply(ctx, b, bundle.Seq(out...))
+	return bundle.ApplySeq(ctx, b, out...)
}


@ -1,16 +1,19 @@
package mutator

import (
+	"context"
+
	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/loader"
	pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
	"github.com/databricks/cli/bundle/config/validate"
	"github.com/databricks/cli/bundle/scripts"
+	"github.com/databricks/cli/libs/diag"
)

-func DefaultMutators() []bundle.Mutator {
-	return []bundle.Mutator{
+func DefaultMutators(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	return bundle.ApplySeq(ctx, b,
		loader.EntryPoint(),
		// Execute preinit script before processing includes.
@ -31,5 +34,5 @@ func DefaultMutators() []bundle.Mutator {
		// Note: This mutator must run before the target overrides are merged.
		// See the mutator for more details.
		validate.UniqueResourceKeys(),
-	}
+	)
}


@ -80,7 +80,7 @@ func TestPrependWorkspaceForDefaultConfig(t *testing.T) {
			},
		},
	}

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix()))
+	diags := bundle.ApplySeq(context.Background(), b, DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix())
	require.Empty(t, diags)
	require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev", b.Config.Workspace.RootPath)
	require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev/artifacts", b.Config.Workspace.ArtifactPath)


@ -163,8 +163,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
func TestProcessTargetModeDevelopment(t *testing.T) {
	b := mockBundle(config.Development)

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	// Job 1
@ -224,8 +223,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
	})

	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	// Assert that tag normalization took place.
@ -239,8 +237,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) {
	})

	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	// Assert that tag normalization took place (Azure allows more characters than AWS).
@ -254,8 +251,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) {
	})

	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	// Assert that tag normalization took place.
@ -311,8 +307,7 @@ func TestValidateDevelopmentMode(t *testing.T) {
func TestProcessTargetModeDefault(t *testing.T) {
	b := mockBundle("")

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())
	assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
	assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
@ -429,8 +424,7 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) {
		reflect.TypeOf(&resources.Volume{}),
	}

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	resources := reflect.ValueOf(b.Config.Resources)
@ -484,8 +478,7 @@ func TestPrefixAlreadySet(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.NamePrefix = "custom_lennart_deploy_"

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, "custom_lennart_deploy_job1", b.Config.Resources.Jobs["job1"].Name)
@ -498,8 +491,7 @@ func TestTagsAlreadySet(t *testing.T) {
		"dev": "foo",
	}

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, "tag", b.Config.Resources.Jobs["job1"].Tags["custom"])
@ -510,8 +502,7 @@ func TestTagsNil(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.Tags = nil

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@ -521,8 +512,7 @@ func TestTagsEmptySet(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.Tags = map[string]string{}

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@ -532,8 +522,7 @@ func TestJobsMaxConcurrentRunsAlreadySet(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.JobsMaxConcurrentRuns = 10

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, 10, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@ -543,8 +532,7 @@ func TestJobsMaxConcurrentRunsDisabled(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.JobsMaxConcurrentRuns = 1

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@ -554,8 +542,7 @@ func TestTriggerPauseStatusWhenUnpaused(t *testing.T) {
	b := mockBundle(config.Development)
	b.Config.Presets.TriggerPauseStatus = config.Unpaused

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.ErrorContains(t, diags.Error(), "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default")
}

@ -564,8 +551,7 @@ func TestPipelinesDevelopmentDisabled(t *testing.T) {
	notEnabled := false
	b.Config.Presets.PipelinesDevelopment = &notEnabled

-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
	require.NoError(t, diags.Error())

	assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)


@ -176,7 +176,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
		{ClusterId: "9876-5432-xywz", ClusterName: "some other cluster"},
	}, nil)

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
	require.NoError(t, diags.Error())
	require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster)
	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
@ -203,7 +203,7 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
	m := mocks.NewMockWorkspaceClient(t)
	b.SetWorkpaceClient(m.WorkspaceClient)

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
	require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables")
}

@ -229,7 +229,7 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {
	ctx := context.Background()
	ctx = env.Set(ctx, "BUNDLE_VAR_lookup", "1234-5678-abcd")

-	diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(ctx, b, SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())
	require.NoError(t, diags.Error())
	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
}


@ -828,12 +828,10 @@ func TestTranslatePathWithComplexVariables(t *testing.T) {
	})
	require.NoError(t, diags.Error())

-	diags = bundle.Apply(ctx, b,
-		bundle.Seq(
+	diags = bundle.ApplySeq(ctx, b,
		mutator.SetVariables(),
		mutator.ResolveVariableReferences("variables"),
-		mutator.TranslatePaths(),
-	))
+		mutator.TranslatePaths())
	require.NoError(t, diags.Error())

	assert.Equal(


@ -0,0 +1,77 @@
package validate

import (
	"context"
	"fmt"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/auth"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/dynvar"
)

type noInterpolationInAuthConfig struct{}

func NoInterpolationInAuthConfig() bundle.Mutator {
	return &noInterpolationInAuthConfig{}
}

func (f *noInterpolationInAuthConfig) Name() string {
	return "validate:interpolation_in_auth_config"
}

func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	authFields := []string{
		// Generic attributes.
		"host", "profile", "auth_type", "metadata_service_url",

		// OAuth specific attributes.
		"client_id",

		// Google specific attributes.
		"google_service_account",

		// Azure specific attributes.
		"azure_resource_id", "azure_use_msi", "azure_client_id", "azure_tenant_id",
		"azure_environment", "azure_login_app_id",
	}

	diags := diag.Diagnostics{}

	for _, fieldName := range authFields {
		p := dyn.NewPath(dyn.Key("workspace"), dyn.Key(fieldName))
		v, err := dyn.GetByPath(b.Config.Value(), p)
		if dyn.IsNoSuchKeyError(err) {
			continue
		}
		if err != nil {
			return diag.FromErr(err)
		}

		if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
			continue
		}

		vv := v.MustString()

		// Check if the field contains interpolation.
		if dynvar.ContainsVariableReference(vv) {
			envVar, ok := auth.GetEnvFor(fieldName)
			if !ok {
				continue
			}

			diags = append(diags, diag.Diagnostic{
				Severity: diag.Warning,
				Summary:  "Variable interpolation is not supported for fields that configure authentication",
				Detail: fmt.Sprintf(`Interpolation is not supported for the field %s. Please set
the %s environment variable if you wish to configure this field at runtime.`, p.String(), envVar),
				Locations: v.Locations(),
				Paths:     []dyn.Path{p},
			})
		}
	}

	return diags
}
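The check pivots on `dynvar.ContainsVariableReference`: any `${...}` reference in one of the listed `workspace` fields produces the warning seen in the acceptance output above, steering users to the matching environment variable instead. A small standalone illustration (the host values are made up):

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn/dynvar"
)

func main() {
	// A literal host is left alone.
	fmt.Println(dynvar.ContainsVariableReference("https://adb-1234567890123456.7.azuredatabricks.net")) // false

	// An interpolated host triggers the validate:interpolation_in_auth_config warning.
	fmt.Println(dynvar.ContainsVariableReference("https://${var.host}")) // true
}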


@ -1,30 +0,0 @@
package bundle

import (
	"context"

	"github.com/databricks/cli/libs/diag"
)

type DeferredMutator struct {
	mutator Mutator
	finally Mutator
}

func (d *DeferredMutator) Name() string {
	return "deferred"
}

func Defer(mutator, finally Mutator) Mutator {
	return &DeferredMutator{
		mutator: mutator,
		finally: finally,
	}
}

func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	diags = diags.Extend(Apply(ctx, b, d.mutator))
	diags = diags.Extend(Apply(ctx, b, d.finally))
	return diags
}


@ -1,114 +0,0 @@
package bundle

import (
	"context"
	"testing"

	"github.com/databricks/cli/libs/diag"
	"github.com/stretchr/testify/assert"
)

type mutatorWithError struct {
	applyCalled int
	errorMsg    string
}

func (t *mutatorWithError) Name() string {
	return "mutatorWithError"
}

func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics {
	t.applyCalled++
	return diag.Errorf(t.errorMsg) // nolint:govet
}

func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	m3 := &testMutator{}
	cleanup := &testMutator{}
	deferredMutator := Defer(Seq(m1, m2, m3), cleanup)

	b := &Bundle{}
	diags := Apply(context.Background(), b, deferredMutator)
	assert.NoError(t, diags.Error())

	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, m3.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

func TestDeferredMutatorWhenFirstFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	deferredMutator := Defer(Seq(mErr, m1, m2), cleanup)

	b := &Bundle{}
	diags := Apply(context.Background(), b, deferredMutator)
	assert.ErrorContains(t, diags.Error(), "mutator error occurred")

	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 0, m1.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	deferredMutator := Defer(Seq(m1, mErr, m2), cleanup)

	b := &Bundle{}
	diags := Apply(context.Background(), b, deferredMutator)
	assert.ErrorContains(t, diags.Error(), "mutator error occurred")

	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

func TestDeferredMutatorWhenLastOneFails(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanup := &testMutator{}
	deferredMutator := Defer(Seq(m1, m2, mErr), cleanup)

	b := &Bundle{}
	diags := Apply(context.Background(), b, deferredMutator)
	assert.ErrorContains(t, diags.Error(), "mutator error occurred")

	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 1, cleanup.applyCalled)
}

func TestDeferredMutatorCombinesErrorMessages(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
	cleanupErr := &mutatorWithError{errorMsg: "cleanup error occurred"}
	deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr)

	b := &Bundle{}
	diags := Apply(context.Background(), b, deferredMutator)

	var errs []string
	for _, d := range diags {
		errs = append(errs, d.Summary)
	}
	assert.Contains(t, errs, "mutator error occurred")
	assert.Contains(t, errs, "cleanup error occurred")

	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
	assert.Equal(t, 1, mErr.applyCalled)
	assert.Equal(t, 1, cleanupErr.applyCalled)
}


@ -32,10 +32,10 @@ func TestLoadWithNoState(t *testing.T) {
t.Setenv("DATABRICKS_TOKEN", "foobar") t.Setenv("DATABRICKS_TOKEN", "foobar")
b.WorkspaceClient() b.WorkspaceClient()
diags := bundle.Apply(context.Background(), b, bundle.Seq( diags := bundle.ApplySeq(context.Background(), b,
Initialize(), Initialize(),
Load(ErrorOnEmptyState), Load(ErrorOnEmptyState),
)) )
require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'") require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'")
} }


@ -1,40 +0,0 @@
package bundle

import (
	"context"

	"github.com/databricks/cli/libs/diag"
)

type ifMutator struct {
	condition      func(context.Context, *Bundle) (bool, error)
	onTrueMutator  Mutator
	onFalseMutator Mutator
}

func If(
	condition func(context.Context, *Bundle) (bool, error),
	onTrueMutator Mutator,
	onFalseMutator Mutator,
) Mutator {
	return &ifMutator{
		condition, onTrueMutator, onFalseMutator,
	}
}

func (m *ifMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
	v, err := m.condition(ctx, b)
	if err != nil {
		return diag.FromErr(err)
	}

	if v {
		return Apply(ctx, b, m.onTrueMutator)
	} else {
		return Apply(ctx, b, m.onFalseMutator)
	}
}

func (m *ifMutator) Name() string {
	return "If"
}


@ -1,53 +0,0 @@
package bundle

import (
	"context"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestIfMutatorTrue(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
		return true, nil
	}, m1, m2)

	b := &Bundle{}
	diags := Apply(context.Background(), b, ifMutator)
	assert.NoError(t, diags.Error())

	assert.Equal(t, 1, m1.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
}

func TestIfMutatorFalse(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
		return false, nil
	}, m1, m2)

	b := &Bundle{}
	diags := Apply(context.Background(), b, ifMutator)
	assert.NoError(t, diags.Error())

	assert.Equal(t, 0, m1.applyCalled)
	assert.Equal(t, 1, m2.applyCalled)
}

func TestIfMutatorError(t *testing.T) {
	m1 := &testMutator{}
	m2 := &testMutator{}
	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
		return true, assert.AnError
	}, m1, m2)

	b := &Bundle{}
	diags := Apply(context.Background(), b, ifMutator)
	assert.Error(t, diags.Error())

	assert.Equal(t, 0, m1.applyCalled)
	assert.Equal(t, 0, m2.applyCalled)
}


@ -92,7 +92,7 @@ func expandLibraries(b *bundle.Bundle, p dyn.Path, v dyn.Value) (diag.Diagnostic
		for _, match := range matches {
			output = append(output, dyn.NewValue(map[string]dyn.Value{
-				libType: dyn.V(match),
+				libType: dyn.NewValue(match, lib.Locations()),
			}, lib.Locations()))
		}
	}
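The one-line change above attaches the library's source locations to the inner value as well, not just the wrapping map, so a diagnostic raised against an expanded glob entry can point back at the databricks.yml line that declared it. A sketch of the difference, assuming this repo's `libs/dyn` package (the file, line, and column values are hypothetical):

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Location of a hypothetical `- whl: ./whl1/dist/*.whl` entry.
	locs := []dyn.Location{{File: "databricks.yml", Line: 36, Column: 15}}

	// Before: dyn.V(match) carried no location of its own.
	// After: dyn.NewValue(match, locs) keeps the location, so a diagnostic
	// on the expanded path can render "in databricks.yml:36:15".
	v := dyn.NewValue("./whl1/dist/my_default_python-0.0.1-py3-none-any.whl", locs)
	fmt.Println(v.Location())
}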


@ -0,0 +1,104 @@
package libraries

import (
	"context"
	"path/filepath"
	"strings"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/libs/diag"
	"github.com/databricks/cli/libs/dyn"
)

type checkForSameNameLibraries struct{}

var patterns = []dyn.Pattern{
	taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
	forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
	envDepsPattern.Append(dyn.AnyIndex()),
}

type libData struct {
	fullPath   string
	locations  []dyn.Location
	paths      []dyn.Path
	otherPaths []string
}

func (c checkForSameNameLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
	var diags diag.Diagnostics
	libs := make(map[string]*libData)

	err := b.Config.Mutate(func(rootConfig dyn.Value) (dyn.Value, error) {
		var err error
		for _, pattern := range patterns {
			rootConfig, err = dyn.MapByPattern(rootConfig, pattern, func(p dyn.Path, libraryValue dyn.Value) (dyn.Value, error) {
				libPath, ok := libraryValue.AsString()
				if !ok {
					return libraryValue, nil
				}

				// If not local library, skip the check
				if !IsLibraryLocal(libPath) {
					return libraryValue, nil
				}

				lib := filepath.Base(libPath)
				// If the same basename was seen already but full path is different
				// then it's a duplicate. Add the location to the location list.
				lp, ok := libs[lib]
				if !ok {
					libs[lib] = &libData{
						fullPath:   libPath,
						locations:  []dyn.Location{libraryValue.Location()},
						paths:      []dyn.Path{p},
						otherPaths: []string{},
					}
				} else if lp.fullPath != libPath {
					lp.locations = append(lp.locations, libraryValue.Location())
					lp.paths = append(lp.paths, p)
					lp.otherPaths = append(lp.otherPaths, libPath)
				}

				return libraryValue, nil
			})
			if err != nil {
				return dyn.InvalidValue, err
			}
		}

		if err != nil {
			return dyn.InvalidValue, err
		}

		return rootConfig, nil
	})

	// Iterate over all the libraries and check if there are any duplicates.
	// Duplicates will have more than one location.
	// If there are duplicates, add a diagnostic.
	for lib, lv := range libs {
		if len(lv.locations) > 1 {
			diags = append(diags, diag.Diagnostic{
				Severity:  diag.Error,
				Summary:   "Duplicate local library names: " + lib,
				Detail:    "Local library names must be unique but found libraries with the same name: " + lv.fullPath + ", " + strings.Join(lv.otherPaths, ", "),
				Locations: lv.locations,
				Paths:     lv.paths,
			})
		}
	}
	if err != nil {
		diags = diags.Extend(diag.FromErr(err))
	}

	return diags
}

func (c checkForSameNameLibraries) Name() string {
	return "CheckForSameNameLibraries"
}

func CheckForSameNameLibraries() bundle.Mutator {
	return checkForSameNameLibraries{}
}
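At its core the mutator groups every local library path by basename and flags a second, different path that maps to the same basename; repeating the exact same path (as task3 does in the same_name_libraries fixture) is not a duplicate. A standalone sketch of that grouping logic, with illustrative paths:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Wheel paths as they might look after glob expansion (illustrative).
	paths := []string{
		"whl1/dist/my_default_python-0.0.1-py3-none-any.whl",
		"whl2/dist/my_default_python-0.0.1-py3-none-any.whl", // same basename, different path: duplicate
		"whl1/dist/my_default_python-0.0.1-py3-none-any.whl", // same path again: not a duplicate
	}

	seen := map[string]string{} // basename -> first full path seen
	for _, p := range paths {
		base := filepath.Base(p)
		if first, ok := seen[base]; ok && first != p {
			// The upload directory is flat, so two different files with the
			// same basename would overwrite each other.
			fmt.Printf("duplicate local library name %q: %s, %s\n", base, first, p)
			continue
		}
		seen[base] = p
	}
}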


@ -93,7 +93,7 @@ func TestArtifactUploadForWorkspace(t *testing.T) {
		filer.CreateParentDirectories,
	).Return(nil)

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler)))
+	diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
	require.NoError(t, diags.Error())

	// Test that libraries path is updated
@ -181,7 +181,7 @@ func TestArtifactUploadForVolumes(t *testing.T) {
		filer.CreateParentDirectories,
	).Return(nil)

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler)))
+	diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
	require.NoError(t, diags.Error())

	// Test that libraries path is updated
@ -225,7 +225,7 @@ func TestArtifactUploadWithNoLibraryReference(t *testing.T) {
		filer.CreateParentDirectories,
	).Return(nil)

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler)))
+	diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
	require.NoError(t, diags.Error())

	require.Equal(t, "/Workspace/foo/bar/artifacts/.internal/source.whl", b.Config.Artifacts["whl"].Files[0].RemotePath)
@ -311,7 +311,7 @@ func TestUploadMultipleLibraries(t *testing.T) {
		filer.CreateParentDirectories,
	).Return(nil).Once()

-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler)))
+	diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler))
	require.NoError(t, diags.Error())

	// Test that libraries path is updated


@ -1,28 +0,0 @@
package bundle

import (
	"context"

	"github.com/databricks/cli/libs/cmdio"
	"github.com/databricks/cli/libs/diag"
)

type LogStringMutator struct {
	message string
}

func (d *LogStringMutator) Name() string {
	return "log_string"
}

func LogString(message string) Mutator {
	return &LogStringMutator{
		message: message,
	}
}

func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
	cmdio.LogString(ctx, m.message)

	return nil
}


@@ -48,6 +48,17 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics {
 	return diags
 }
 
+func ApplySeq(ctx context.Context, b *Bundle, mutators ...Mutator) diag.Diagnostics {
+	diags := diag.Diagnostics{}
+	for _, m := range mutators {
+		diags = diags.Extend(Apply(ctx, b, m))
+		if diags.HasError() {
+			return diags
+		}
+	}
+	return diags
+}
+
 type funcMutator struct {
 	fn func(context.Context, *Bundle) diag.Diagnostics
 }
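
ApplySeq replaces the removed Seq mutator: instead of wrapping mutators in a composite Mutator value, callers pass them variadically and get the same fail-fast semantics. A before/after sketch, where a and b stand in for any bundle.Mutator values:

// Before this commit: composition through a wrapper mutator.
diags := bundle.Apply(ctx, b, bundle.Seq(a, b))

// After: plain sequential application with the same fail-fast behavior
// (ApplySeq stops at the first mutator whose diagnostics contain an error).
diags = bundle.ApplySeq(ctx, b, a, b)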

View File

@@ -19,7 +19,7 @@ func (t *testMutator) Name() string {
 
 func (t *testMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
 	t.applyCalled++
-	return Apply(ctx, b, Seq(t.nestedMutators...))
+	return ApplySeq(ctx, b, t.nestedMutators...)
 }
 
 func TestMutator(t *testing.T) {

View File

@@ -34,7 +34,7 @@ func TestValidateSharedRootPermissionsForShared(t *testing.T) {
 	m := mocks.NewMockWorkspaceClient(t)
 	b.SetWorkpaceClient(m.WorkspaceClient)
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions()))
+	diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions())
 	require.Empty(t, diags)
 }
@@ -59,7 +59,7 @@ func TestValidateSharedRootPermissionsForSharedError(t *testing.T) {
 	m := mocks.NewMockWorkspaceClient(t)
 	b.SetWorkpaceClient(m.WorkspaceClient)
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions()))
+	diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions())
 	require.Len(t, diags, 1)
 	require.Equal(t, "the bundle root path /Workspace/Shared/foo/bar is writable by all workspace users", diags[0].Summary)
 	require.Equal(t, diag.Warning, diags[0].Severity)

View File

@@ -73,7 +73,7 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) {
 		WorkspaceObjectType: "directories",
 	}).Return(nil, nil)
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions()))
+	diags := bundle.ApplySeq(context.Background(), b, ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions())
 	require.Empty(t, diags)
 }

View File

@@ -1,45 +1,57 @@
 package phases
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/deploy/lock"
 	"github.com/databricks/cli/bundle/deploy/terraform"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 )
 
-func Bind(opts *terraform.BindOptions) bundle.Mutator {
-	return newPhase(
-		"bind",
-		[]bundle.Mutator{
-			lock.Acquire(),
-			bundle.Defer(
-				bundle.Seq(
+func Bind(ctx context.Context, b *bundle.Bundle, opts *terraform.BindOptions) (diags diag.Diagnostics) {
+	log.Info(ctx, "Phase: bind")
+
+	diags = bundle.Apply(ctx, b, lock.Acquire())
+	if diags.HasError() {
+		return diags
+	}
+
+	defer func() {
+		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalBind)))
+	}()
+
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.StatePull(),
 		terraform.Interpolate(),
 		terraform.Write(),
 		terraform.Import(opts),
 		terraform.StatePush(),
-				),
-				lock.Release(lock.GoalBind),
-			),
-		},
-	)
+	))
+
+	return diags
 }
 
-func Unbind(resourceType, resourceKey string) bundle.Mutator {
-	return newPhase(
-		"unbind",
-		[]bundle.Mutator{
-			lock.Acquire(),
-			bundle.Defer(
-				bundle.Seq(
+func Unbind(ctx context.Context, b *bundle.Bundle, resourceType, resourceKey string) (diags diag.Diagnostics) {
+	log.Info(ctx, "Phase: unbind")
+
+	diags = bundle.Apply(ctx, b, lock.Acquire())
+	if diags.HasError() {
+		return diags
+	}
+
+	defer func() {
+		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalUnbind)))
+	}()
+
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.StatePull(),
 		terraform.Interpolate(),
 		terraform.Write(),
 		terraform.Unbind(resourceType, resourceKey),
 		terraform.StatePush(),
-				),
-				lock.Release(lock.GoalUnbind),
-			),
-		},
-	)
+	))
+
+	return diags
}
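
The Defer mutator is replaced here by Go's own defer statement plus a named return value, so the lock is released even when a mutator in the sequence fails. A minimal sketch of the pattern in isolation; withLock is a hypothetical helper, and the lock.Goal type name is assumed from the lock.GoalBind/GoalDeploy values in this diff:

// Sketch of the lock-release pattern used by Bind, Unbind, Deploy, and Destroy.
// The named return value lets the deferred closure append the release
// diagnostics to whatever the function body already produced.
func withLock(ctx context.Context, b *bundle.Bundle, goal lock.Goal, work bundle.Mutator) (diags diag.Diagnostics) {
	diags = bundle.Apply(ctx, b, lock.Acquire())
	if diags.HasError() {
		return diags
	}
	defer func() {
		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(goal)))
	}()
	return diags.Extend(bundle.Apply(ctx, b, work))
}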

View File

@@ -1,19 +1,23 @@
 package phases
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/artifacts"
 	"github.com/databricks/cli/bundle/artifacts/whl"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/scripts"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 )
 
 // The build phase builds artifacts.
-func Build() bundle.Mutator {
-	return newPhase(
-		"build",
-		[]bundle.Mutator{
+func Build(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: build")
+
+	return bundle.ApplySeq(ctx, b,
 		scripts.Execute(config.ScriptPreBuild),
 		whl.DetectPackage(),
 		artifacts.InferMissingProperties(),
@@ -23,6 +27,5 @@ func Build() bundle.Mutator {
 		mutator.ResolveVariableReferences(
 			"artifacts",
 		),
-		},
 	)
}

View File

@@ -19,6 +19,8 @@ import (
 	"github.com/databricks/cli/bundle/scripts"
 	"github.com/databricks/cli/bundle/trampoline"
 	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 	"github.com/databricks/cli/libs/sync"
 	terraformlib "github.com/databricks/cli/libs/terraform"
 	tfjson "github.com/hashicorp/terraform-json"
@@ -124,37 +126,63 @@ is removed from the catalog, but the underlying files are not deleted:`
 	return approved, nil
 }
 
-// The deploy phase deploys artifacts and resources.
-func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
+func deployCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// Core mutators that CRUD resources and modify deployment state. These
 	// mutators need informed consent if they are potentially destructive.
-	deployCore := bundle.Defer(
-		bundle.Seq(
-			bundle.LogString("Deploying resources..."),
-			terraform.Apply(),
-		),
-		bundle.Seq(
+	cmdio.LogString(ctx, "Deploying resources...")
+	diags := bundle.Apply(ctx, b, terraform.Apply())
+
+	// following original logic, continuing with sequence below even if terraform had errors
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.StatePush(),
 		terraform.Load(),
 		apps.InterpolateVariables(),
 		apps.UploadConfig(),
 		metadata.Compute(),
 		metadata.Upload(),
-			bundle.LogString("Deployment complete!"),
-		),
-	)
+	))
 
-	deployMutator := bundle.Seq(
+	if !diags.HasError() {
+		cmdio.LogString(ctx, "Deployment complete!")
+	}
+
+	return diags
+}
+
+// The deploy phase deploys artifacts and resources.
+func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHandler) (diags diag.Diagnostics) {
+	log.Info(ctx, "Phase: deploy")
+
+	// Core mutators that CRUD resources and modify deployment state. These
+	// mutators need informed consent if they are potentially destructive.
+	diags = bundle.ApplySeq(ctx, b,
 		scripts.Execute(config.ScriptPreDeploy),
 		lock.Acquire(),
-		bundle.Defer(
-			bundle.Seq(
+	)
+
+	if diags.HasError() {
+		// lock is not acquired here
+		return diags
+	}
+
+	// lock is acquired here
+	defer func() {
+		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDeploy)))
+	}()
+
+	diags = bundle.ApplySeq(ctx, b,
 		terraform.StatePull(),
 		terraform.CheckDashboardsModifiedRemotely(),
 		deploy.StatePull(),
 		mutator.ValidateGitDetails(),
 		artifacts.CleanUp(),
+		// libraries.CheckForSameNameLibraries() needs to be run after we expand glob references so we
+		// know what are the actual library paths.
+		// libraries.ExpandGlobReferences() has to be run after the libraries are built and thus this
+		// mutator is part of the deploy step rather than validate.
 		libraries.ExpandGlobReferences(),
+		libraries.CheckForSameNameLibraries(),
 		libraries.Upload(),
 		trampoline.TransformWheelTask(),
 		files.Upload(outputHandler),
@@ -165,19 +193,27 @@ func Deploy(outputHandler sync.OutputHandler) bundle.Mutator {
 		terraform.Write(),
 		terraform.CheckRunningResource(),
 		terraform.Plan(terraform.PlanGoal("deploy")),
-		bundle.If(
-			approvalForDeploy,
-			deployCore,
-			bundle.LogString("Deployment cancelled!"),
-		),
-			),
-			lock.Release(lock.GoalDeploy),
-		),
-		scripts.Execute(config.ScriptPostDeploy),
 	)
 
-	return newPhase(
-		"deploy",
-		[]bundle.Mutator{deployMutator},
-	)
+	if diags.HasError() {
+		return diags
+	}
+
+	haveApproval, err := approvalForDeploy(ctx, b)
+	if err != nil {
+		diags = diags.Extend(diag.FromErr(err))
+		return diags
+	}
+
+	if haveApproval {
+		diags = diags.Extend(deployCore(ctx, b))
+	} else {
+		cmdio.LogString(ctx, "Deployment cancelled!")
+	}
+
+	if diags.HasError() {
+		return diags
+	}
+
+	return diags.Extend(bundle.Apply(ctx, b, scripts.Execute(config.ScriptPostDeploy)))
}

View File

@@ -11,6 +11,7 @@ import (
 	"github.com/databricks/cli/bundle/deploy/terraform"
 
 	"github.com/databricks/cli/libs/cmdio"
+	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/cli/libs/log"
 	terraformlib "github.com/databricks/cli/libs/terraform"
@@ -77,42 +78,65 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) {
 	return approved, nil
 }
 
-// The destroy phase deletes artifacts and resources.
-func Destroy() bundle.Mutator {
+func destroyCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// Core destructive mutators for destroy. These require informed user consent.
-	destroyCore := bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		terraform.Apply(),
 		files.Delete(),
-		bundle.LogString("Destroy complete!"),
 	)
 
-	destroyMutator := bundle.Seq(
-		lock.Acquire(),
-		bundle.Defer(
-			bundle.Seq(
+	if !diags.HasError() {
+		cmdio.LogString(ctx, "Destroy complete!")
+	}
+
+	return diags
+}
+
+// The destroy phase deletes artifacts and resources.
+func Destroy(ctx context.Context, b *bundle.Bundle) (diags diag.Diagnostics) {
+	log.Info(ctx, "Phase: destroy")
+
+	ok, err := assertRootPathExists(ctx, b)
+	if err != nil {
+		return diag.FromErr(err)
+	}
+
+	if !ok {
+		cmdio.LogString(ctx, "No active deployment found to destroy!")
+		return diags
+	}
+
+	diags = diags.Extend(bundle.Apply(ctx, b, lock.Acquire()))
+	if diags.HasError() {
+		return diags
+	}
+
+	defer func() {
+		diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDestroy)))
+	}()
+
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.StatePull(),
 		terraform.Interpolate(),
 		terraform.Write(),
 		terraform.Plan(terraform.PlanGoal("destroy")),
-				bundle.If(
-					approvalForDestroy,
-					destroyCore,
-					bundle.LogString("Destroy cancelled!"),
-				),
-			),
-			lock.Release(lock.GoalDestroy),
-		),
-	)
+	))
 
-	return newPhase(
-		"destroy",
-		[]bundle.Mutator{
-			// Only run deploy mutator if root path exists.
-			bundle.If(
-				assertRootPathExists,
-				destroyMutator,
-				bundle.LogString("No active deployment found to destroy!"),
-			),
-		},
-	)
+	if diags.HasError() {
+		return diags
+	}
+
+	hasApproval, err := approvalForDestroy(ctx, b)
+	if err != nil {
+		diags = diags.Extend(diag.FromErr(err))
+		return diags
+	}
+
+	if hasApproval {
+		diags = diags.Extend(destroyCore(ctx, b))
+	} else {
+		cmdio.LogString(ctx, "Destroy cancelled!")
+	}
+
+	return diags
}

View File

@@ -1,6 +1,8 @@
 package phases
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/apps"
 	"github.com/databricks/cli/bundle/config"
@@ -12,16 +14,19 @@ import (
 	"github.com/databricks/cli/bundle/permissions"
 	"github.com/databricks/cli/bundle/scripts"
 	"github.com/databricks/cli/bundle/trampoline"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 )
 
 // The initialize phase fills in defaults and connects to the workspace.
 // Interpolation of fields referring to the "bundle" and "workspace" keys
 // happens upon completion of this phase.
-func Initialize() bundle.Mutator {
-	return newPhase(
-		"initialize",
-		[]bundle.Mutator{
+func Initialize(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: initialize")
+
+	return bundle.ApplySeq(ctx, b,
 		validate.AllResourcesHaveValues(),
+		validate.NoInterpolationInAuthConfig(),
 
 		// Update all path fields in the sync block to be relative to the bundle root path.
 		mutator.RewriteSyncPaths(),
@@ -101,6 +106,5 @@ func Initialize() bundle.Mutator {
 		metadata.AnnotatePipelines(),
 		terraform.Initialize(),
 		scripts.Execute(config.ScriptPostInit),
-		},
 	)
}

View File

@@ -1,29 +1,40 @@
 package phases
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config/mutator"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/log"
 )
 
 // The load phase loads configuration from disk and performs
 // lightweight preprocessing (anything that can be done without network I/O).
-func Load() bundle.Mutator {
-	return newPhase(
-		"load",
-		mutator.DefaultMutators(),
-	)
+func Load(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+
+	return mutator.DefaultMutators(ctx, b)
 }
 
-func LoadDefaultTarget() bundle.Mutator {
-	return newPhase(
-		"load",
-		append(mutator.DefaultMutators(), mutator.SelectDefaultTarget()),
-	)
+func LoadDefaultTarget(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+
+	diags := mutator.DefaultMutators(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+
+	return diags.Extend(bundle.Apply(ctx, b, mutator.SelectDefaultTarget()))
 }
 
-func LoadNamedTarget(target string) bundle.Mutator {
-	return newPhase(
-		"load",
-		append(mutator.DefaultMutators(), mutator.SelectTarget(target)),
-	)
+func LoadNamedTarget(ctx context.Context, b *bundle.Bundle, target string) diag.Diagnostics {
+	log.Info(ctx, "Phase: load")
+
+	diags := mutator.DefaultMutators(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+
+	return diags.Extend(bundle.Apply(ctx, b, mutator.SelectTarget(target)))
}
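
With phases exposed as plain functions rather than mutators, a caller strings them together with ordinary control flow. A sketch of the resulting lifecycle, modeled on the deploy command later in this diff (outputHandler comes from the caller; target selection via LoadDefaultTarget/LoadNamedTarget is elided):

// Sketch: the bundle lifecycle as plain function calls. Each phase returns
// diag.Diagnostics; callers stop at the first phase that produced an error.
diags := phases.Load(ctx, b)
if !diags.HasError() {
	diags = diags.Extend(phases.Initialize(ctx, b))
}
if !diags.HasError() {
	diags = diags.Extend(phases.Build(ctx, b))
}
if !diags.HasError() {
	diags = diags.Extend(phases.Deploy(ctx, b, outputHandler))
}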

View File

@@ -1,33 +0,0 @@
-// Package phases defines build phases as logical groups of [bundle.Mutator] instances.
-package phases
-
-import (
-	"context"
-
-	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/libs/diag"
-	"github.com/databricks/cli/libs/log"
-)
-
-// This phase type groups mutators that belong to a lifecycle phase.
-// It expands into the specific mutators when applied.
-type phase struct {
-	name     string
-	mutators []bundle.Mutator
-}
-
-func newPhase(name string, mutators []bundle.Mutator) bundle.Mutator {
-	return &phase{
-		name:     name,
-		mutators: mutators,
-	}
-}
-
-func (p *phase) Name() string {
-	return p.name
-}
-
-func (p *phase) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-	log.Infof(ctx, "Phase: %s", p.Name())
-	return bundle.Apply(ctx, b, bundle.Seq(p.mutators...))
-}

View File

@@ -76,10 +76,10 @@ func setupBundle(t *testing.T) (context.Context, *bundle.Bundle, *mocks.MockWork
 	ctx := cmdio.MockDiscard(context.Background())
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		mutator.DefineDefaultWorkspacePaths(),
 		mutator.TranslatePaths(),
-	))
+	)
 	require.Empty(t, diags)
 
 	return ctx, b, mwc

View File

@@ -1,30 +0,0 @@
-package bundle
-
-import (
-	"context"
-
-	"github.com/databricks/cli/libs/diag"
-)
-
-type seqMutator struct {
-	mutators []Mutator
-}
-
-func (s *seqMutator) Name() string {
-	return "seq"
-}
-
-func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
-	var diags diag.Diagnostics
-	for _, m := range s.mutators {
-		diags = diags.Extend(Apply(ctx, b, m))
-		if diags.HasError() {
-			break
-		}
-	}
-	return diags
-}
-
-func Seq(ms ...Mutator) Mutator {
-	return &seqMutator{mutators: ms}
-}

View File

@@ -1,91 +0,0 @@
-package bundle
-
-import (
-	"context"
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-)
-
-func TestSeqMutator(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	seqMutator := Seq(m1, m2, m3)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, seqMutator)
-	assert.NoError(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, m3.applyCalled)
-}
-
-func TestSeqWithDeferredMutator(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	m4 := &testMutator{}
-	seqMutator := Seq(m1, Defer(m2, m3), m4)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, seqMutator)
-	assert.NoError(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, m3.applyCalled)
-	assert.Equal(t, 1, m4.applyCalled)
-}
-
-func TestSeqWithErrorAndDeferredMutator(t *testing.T) {
-	errorMut := &mutatorWithError{errorMsg: "error msg"}
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	seqMutator := Seq(errorMut, Defer(m1, m2), m3)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, seqMutator)
-	assert.Error(t, diags.Error())
-
-	assert.Equal(t, 1, errorMut.applyCalled)
-	assert.Equal(t, 0, m1.applyCalled)
-	assert.Equal(t, 0, m2.applyCalled)
-	assert.Equal(t, 0, m3.applyCalled)
-}
-
-func TestSeqWithErrorInsideDeferredMutator(t *testing.T) {
-	errorMut := &mutatorWithError{errorMsg: "error msg"}
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	seqMutator := Seq(m1, Defer(errorMut, m2), m3)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, seqMutator)
-	assert.Error(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, errorMut.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 0, m3.applyCalled)
-}
-
-func TestSeqWithErrorInsideFinallyStage(t *testing.T) {
-	errorMut := &mutatorWithError{errorMsg: "error msg"}
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	seqMutator := Seq(m1, Defer(m2, errorMut), m3)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, seqMutator)
-	assert.Error(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, errorMut.applyCalled)
-	assert.Equal(t, 0, m3.applyCalled)
-}

View File

@@ -13,11 +13,10 @@ func TestApps(t *testing.T) {
 	b := load(t, "./apps")
 	assert.Equal(t, "apps", b.Config.Bundle.Name)
 
-	diags := bundle.Apply(context.Background(), b,
-		bundle.Seq(
+	diags := bundle.ApplySeq(context.Background(), b,
 		mutator.SetVariables(),
 		mutator.ResolveVariableReferences("variables"),
-		))
+	)
 	assert.Empty(t, diags)
 
 	app := b.Config.Resources.Apps["my_app"]
@@ -37,11 +36,10 @@ func TestAppsOverride(t *testing.T) {
 	b := loadTarget(t, "./apps", "development")
 	assert.Equal(t, "apps", b.Config.Bundle.Name)
 
-	diags := bundle.Apply(context.Background(), b,
-		bundle.Seq(
+	diags := bundle.ApplySeq(context.Background(), b,
 		mutator.SetVariables(),
 		mutator.ResolveVariableReferences("variables"),
-		))
+	)
 	assert.Empty(t, diags)
 	app := b.Config.Resources.Apps["my_app"]
 	assert.Equal(t, "my-app", app.Name)

View File

@@ -17,7 +17,7 @@ func TestIncludeInvalid(t *testing.T) {
 	ctx := context.Background()
 	b, err := bundle.Load(ctx, "./include_invalid")
 	require.NoError(t, err)
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	require.Error(t, diags.Error())
 	assert.ErrorContains(t, diags.Error(), "notexists.yml defined in 'include' section does not match any files")
 }

View File

@@ -20,7 +20,7 @@ func load(t *testing.T, path string) *bundle.Bundle {
 	ctx := context.Background()
 	b, err := bundle.Load(ctx, path)
 	require.NoError(t, err)
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	require.NoError(t, diags.Error())
 	return b
 }
@@ -38,8 +38,9 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
 		return nil, diag.FromErr(err)
 	}
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		phases.LoadNamedTarget(env),
+	diags := phases.LoadNamedTarget(ctx, b, env)
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		mutator.RewriteSyncPaths(),
 		mutator.SyncDefaultPath(),
 		mutator.SyncInferRoot(),
@@ -69,10 +70,8 @@ func initializeTarget(t *testing.T, path, env string) (*bundle.Bundle, diag.Diag
 	configureMock(t, b)
 
 	ctx := dbr.MockRuntime(context.Background(), false)
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		mutator.SelectTarget(env),
-		phases.Initialize(),
-	))
+	diags := bundle.Apply(ctx, b, mutator.SelectTarget(env))
+	diags = diags.Extend(phases.Initialize(ctx, b))
 
 	return b, diags
 }

View File

@@ -18,7 +18,7 @@ func TestPythonWheelBuild(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl")
@@ -34,7 +34,7 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl")
@@ -50,7 +50,7 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl")
@@ -66,7 +66,7 @@ func TestPythonWheelWithDBFSLib(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	match := libraries.ExpandGlobReferences()
@@ -78,7 +78,7 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	mockFiler := mockfiler.NewMockFiler(t)
@@ -90,10 +90,10 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) {
 		filer.CreateParentDirectories,
 	).Return(nil)
 
-	diags = bundle.Apply(ctx, b, bundle.Seq(
+	diags = bundle.ApplySeq(ctx, b,
 		libraries.ExpandGlobReferences(),
 		libraries.UploadWithClient(mockFiler),
-	))
+	)
 	require.NoError(t, diags.Error())
 	require.Empty(t, diags)
 	require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl)
@@ -103,7 +103,7 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/environment_key", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl")
@@ -119,7 +119,7 @@ func TestPythonWheelBuildMultiple(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl")
@@ -135,7 +135,7 @@ func TestPythonWheelNoBuild(t *testing.T) {
 	b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default")
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, phases.Build())
+	diags := phases.Build(ctx, b)
 	require.NoError(t, diags.Error())
 
 	match := libraries.ExpandGlobReferences()

View File

@@ -132,7 +132,7 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) {
 			require.NoError(t, err)
 
 			// The UniqueResourceKeys mutator is run as part of the Load phase.
-			diags := bundle.Apply(ctx, b, phases.Load())
+			diags := phases.Load(ctx, b)
 			assert.Equal(t, tc.diagnostics, diags)
 		})
 	}

View File

@@ -8,8 +8,8 @@ import (
 	"strings"
 
 	"github.com/databricks/cli/bundle"
-	"github.com/databricks/cli/bundle/config/mutator"
 	"github.com/databricks/cli/bundle/libraries"
+	"github.com/databricks/cli/libs/diag"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 )
@@ -61,22 +61,30 @@ s = f.getvalue()
 dbutils.notebook.exit(s)
 `
 
+type transformWheelTask struct{}
+
+func (transformWheelTask) Name() string {
+	return "TransformWheelTask"
+}
+
 // This mutator takes the wheel task and transforms it into notebook
 // which installs uploaded wheels using %pip and then calling corresponding
 // entry point.
 func TransformWheelTask() bundle.Mutator {
-	return bundle.If(
-		func(_ context.Context, b *bundle.Bundle) (bool, error) {
-			res := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper
-			return res, nil
-		},
-		NewTrampoline(
+	return transformWheelTask{}
+}
+
+func (transformWheelTask) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	isEnabled := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper
+	if !isEnabled {
+		return nil
+	}
+
+	return bundle.Apply(ctx, b, NewTrampoline(
 		"python_wheel",
 		&pythonTrampoline{},
 		NOTEBOOK_TEMPLATE,
-		),
-		mutator.NoOp(),
-	)
+	))
 }
 
 type pythonTrampoline struct{}
type pythonTrampoline struct{} type pythonTrampoline struct{}

View File

@@ -69,14 +69,19 @@ func newDeployCommand() *cobra.Command {
 			}
 		}
 
-		diags = diags.Extend(
-			bundle.Apply(ctx, b, bundle.Seq(
-				phases.Initialize(),
-				validate.FastValidate(),
-				phases.Build(),
-				phases.Deploy(outputHandler),
-			)),
-		)
+		diags = diags.Extend(phases.Initialize(ctx, b))
+
+		if !diags.HasError() {
+			diags = diags.Extend(bundle.Apply(ctx, b, validate.FastValidate()))
+		}
+
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Build(ctx, b))
+		}
+
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Deploy(ctx, b, outputHandler))
+		}
 	}
 
 	renderOpts := render.RenderOptions{RenderSummaryTable: false}

View File

@@ -53,15 +53,15 @@ func newBindCommand() *cobra.Command {
 			return nil
 		})
 
-		diags = bundle.Apply(ctx, b, bundle.Seq(
-			phases.Initialize(),
-			phases.Bind(&terraform.BindOptions{
+		diags = phases.Initialize(ctx, b)
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Bind(ctx, b, &terraform.BindOptions{
 				AutoApprove:  autoApprove,
 				ResourceType: resource.TerraformResourceName(),
 				ResourceKey:  args[0],
 				ResourceId:   args[1],
-			}),
-		))
+			}))
+		}
 		if err := diags.Error(); err != nil {
 			return fmt.Errorf("failed to bind the resource, err: %w", err)
 		}

View File

@@ -38,10 +38,10 @@ func newUnbindCommand() *cobra.Command {
 			return nil
 		})
 
-		diags = bundle.Apply(cmd.Context(), b, bundle.Seq(
-			phases.Initialize(),
-			phases.Unbind(resource.TerraformResourceName(), args[0]),
-		))
+		diags = phases.Initialize(ctx, b)
+		if !diags.HasError() {
+			diags = diags.Extend(phases.Unbind(ctx, b, resource.TerraformResourceName(), args[0]))
+		}
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -61,20 +61,25 @@ func newDestroyCommand() *cobra.Command {
 			return errors.New("please specify --auto-approve since selected logging format is json")
 		}
 
-		diags = bundle.Apply(ctx, b, bundle.Seq(
-			phases.Initialize(),
-			// We need to resolve artifact variable (how we do it in build phase)
-			// because some of the to-be-destroyed resource might use this variable.
-			// Not resolving might lead to terraform "Reference to undeclared resource" error
-			mutator.ResolveVariableReferences(
-				"artifacts",
-			),
-			phases.Destroy(),
-		))
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
-		return nil
+
+		diags = diags.Extend(
+			// We need to resolve artifact variable (how we do it in build phase)
+			// because some of the to-be-destroyed resource might use this variable.
+			// Not resolving might lead to terraform "Reference to undeclared resource" error
+			bundle.Apply(ctx, b, mutator.ResolveVariableReferences("artifacts")),
+		)
+		if err := diags.Error(); err != nil {
+			return err
+		}
+
+		diags = diags.Extend(phases.Destroy(ctx, b))
+		// QQQ we're not reporting warnings there. This would be addressed by switching to streaming warnings/errors instead of accumulating.
+		return diags.Error()
 	}
 
 	return cmd
return cmd return cmd

View File

@@ -345,8 +345,12 @@ func (d *dashboard) initialize(b *bundle.Bundle) diag.Diagnostics {
 }
 
 func (d *dashboard) runForResource(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
-	diags := bundle.Apply(ctx, b, bundle.Seq(
-		phases.Initialize(),
+	diags := phases.Initialize(ctx, b)
+	if diags.HasError() {
+		return diags
+	}
+
+	diags = diags.Extend(bundle.ApplySeq(ctx, b,
 		terraform.Interpolate(),
 		terraform.Write(),
 		terraform.StatePull(),

View File

@@ -67,7 +67,7 @@ func newOpenCommand() *cobra.Command {
 			return diags.Error()
 		}
 
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -86,20 +86,20 @@ func newOpenCommand() *cobra.Command {
 		noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist)
 
 		if forcePull || noCache {
-			diags = bundle.Apply(ctx, b, bundle.Seq(
+			diags = bundle.ApplySeq(ctx, b,
 				terraform.StatePull(),
 				terraform.Interpolate(),
 				terraform.Write(),
-			))
+			)
 			if err := diags.Error(); err != nil {
 				return err
 			}
 		}
 
-		diags = bundle.Apply(ctx, b, bundle.Seq(
+		diags = bundle.ApplySeq(ctx, b,
 			terraform.Load(),
 			mutator.InitializeURLs(),
-		))
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -111,7 +111,7 @@ task or a Python wheel task, the second example applies.
 			return diags.Error()
 		}
 
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -121,12 +121,12 @@ task or a Python wheel task, the second example applies.
 			return err
 		}
 
-		diags = bundle.Apply(ctx, b, bundle.Seq(
+		diags = bundle.ApplySeq(ctx, b,
 			terraform.Interpolate(),
 			terraform.Write(),
 			terraform.StatePull(),
 			terraform.Load(terraform.ErrorOnEmptyState),
-		))
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -35,7 +35,7 @@ func newSummaryCommand() *cobra.Command {
 			return diags.Error()
 		}
 
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}
@@ -49,18 +49,20 @@ func newSummaryCommand() *cobra.Command {
 		noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist)
 
 		if forcePull || noCache {
-			diags = bundle.Apply(ctx, b, bundle.Seq(
+			diags = bundle.ApplySeq(ctx, b,
 				terraform.StatePull(),
 				terraform.Interpolate(),
 				terraform.Write(),
-			))
+			)
 			if err := diags.Error(); err != nil {
 				return err
 			}
 		}
 
-		diags = bundle.Apply(ctx, b,
-			bundle.Seq(terraform.Load(), mutator.InitializeURLs()))
+		diags = bundle.ApplySeq(ctx, b,
+			terraform.Load(),
+			mutator.InitializeURLs(),
+		)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -71,7 +71,7 @@ func newSyncCommand() *cobra.Command {
 		}
 
 		// Run initialize phase to make sure paths are set.
-		diags = bundle.Apply(ctx, b, phases.Initialize())
+		diags = phases.Initialize(ctx, b)
 		if err := diags.Error(); err != nil {
 			return err
 		}

View File

@@ -46,7 +46,7 @@ func newValidateCommand() *cobra.Command {
 		}
 
 		if !diags.HasError() {
-			diags = diags.Extend(bundle.Apply(ctx, b, phases.Initialize()))
+			diags = diags.Extend(phases.Initialize(ctx, b))
 		}
 
 		if !diags.HasError() {
View File

@@ -74,16 +74,15 @@ func configureProfile(cmd *cobra.Command, b *bundle.Bundle) diag.Diagnostics {
 
 // configureBundle loads the bundle configuration and configures flag values, if any.
 func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag.Diagnostics) {
-	var m bundle.Mutator
-	if target := getTarget(cmd); target == "" {
-		m = phases.LoadDefaultTarget()
-	} else {
-		m = phases.LoadNamedTarget(target)
-	}
-
 	// Load bundle and select target.
 	ctx := cmd.Context()
-	diags := bundle.Apply(ctx, b, m)
+	var diags diag.Diagnostics
+	if target := getTarget(cmd); target == "" {
+		diags = phases.LoadDefaultTarget(ctx, b)
+	} else {
+		diags = phases.LoadNamedTarget(ctx, b, target)
+	}
+
 	if diags.HasError() {
 		return b, diags
 	}
@@ -159,7 +158,7 @@ func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]s
 	}
 
 	// Load bundle but don't select a target (we're completing those).
-	diags := bundle.Apply(ctx, b, phases.Load())
+	diags := phases.Load(ctx, b)
 	if err := diags.Error(); err != nil {
 		cobra.CompErrorln(err.Error())
 		return nil, cobra.ShellCompDirectiveError

View File

@@ -8,6 +8,7 @@ import (
 	"log/slog"
 	"os"
 	"runtime"
+	"runtime/debug"
 	"slices"
 	"strings"
 	"time"
@@ -102,14 +103,39 @@ func flagErrorFunc(c *cobra.Command, err error) error {
 
 // Execute adds all child commands to the root command and sets flags appropriately.
 // This is called by main.main(). It only needs to happen once to the rootCmd.
-func Execute(ctx context.Context, cmd *cobra.Command) error {
-	// TODO: deferred panic recovery
+func Execute(ctx context.Context, cmd *cobra.Command) (err error) {
+	defer func() {
+		r := recover()
+
+		// No panic. Return normally.
+		if r == nil {
+			return
+		}
+
+		version := build.GetInfo().Version
+		trace := debug.Stack()
+
+		// Set the error so that the CLI exits with a non-zero exit code.
+		err = fmt.Errorf("panic: %v", r)
+
+		fmt.Fprintf(cmd.ErrOrStderr(), `The Databricks CLI unexpectedly had a fatal error.
+Please report this issue to Databricks in the form of a GitHub issue at:
+https://github.com/databricks/cli
+
+CLI Version: %s
+
+Panic Payload: %v
+
+Stack Trace:
+%s`, version, r, string(trace))
+	}()
+
 	ctx = telemetry.WithNewLogger(ctx)
 	ctx = dbr.DetectRuntime(ctx)
 	startTime := time.Now()
 
 	// Run the command
-	cmd, err := cmd.ExecuteContextC(ctx)
+	cmd, err = cmd.ExecuteContextC(ctx)
 	if err != nil && !errors.Is(err, ErrAlreadyPrinted) {
 		// If cmdio logger initialization succeeds, then this function logs with the
 		// initialized cmdio logger, otherwise with the default cmdio logger

cmd/selftest/panic.go Normal file
View File

@@ -0,0 +1,12 @@
+package selftest
+
+import "github.com/spf13/cobra"
+
+func newPanic() *cobra.Command {
+	return &cobra.Command{
+		Use: "panic",
+		Run: func(cmd *cobra.Command, args []string) {
+			panic("the databricks selftest panic command always panics")
+		},
+	}
+}

View File

@@ -12,5 +12,6 @@ func New() *cobra.Command {
 	}
 
 	cmd.AddCommand(newSendTelemetry())
+	cmd.AddCommand(newPanic())
 	return cmd
 }
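
With the recovery handler in place, the new selftest panic command gives a way to exercise it end to end. A hedged sketch of what a regression check could look like; the test and its wiring are hypothetical, only Execute and the "panic: ..." error format are taken from this diff:

// Hypothetical regression check: Execute must convert a panic into a
// returned error (and hence a non-zero exit code) instead of crashing.
func TestExecuteRecoversFromPanic(t *testing.T) {
	cmd := &cobra.Command{
		Use: "boom",
		Run: func(cmd *cobra.Command, args []string) { panic("boom") },
	}
	err := Execute(context.Background(), cmd)
	if err == nil || !strings.Contains(err.Error(), "panic: boom") {
		t.Fatalf("expected panic to surface as an error, got %v", err)
	}
}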

View File

@@ -80,7 +80,7 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) {
 		},
 	}
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 
 	// The remote path attribute on the artifact file should have been set.
@@ -144,7 +144,7 @@ func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) {
 		},
 	}
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 
 	// The remote path attribute on the artifact file should have been set.
@@ -213,7 +213,7 @@ func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) {
 		},
 	}
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload()))
+	diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload())
 	require.NoError(t, diags.Error())
 
 	// The remote path attribute on the artifact file should have been set.

View File

@@ -11,6 +11,50 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
+// sparkJarTestCase defines a Databricks runtime version and a local Java version requirement
+type sparkJarTestCase struct {
+	name                string // Test name
+	runtimeVersion      string // The Spark runtime version to test
+	requiredJavaVersion string // Java version that can compile jar to pass this test
+}
+
+// runSparkJarTests runs a set of test cases with appropriate Java version checks
+// testRunner is the function that runs the actual test with the runtime version
+func runSparkJarTests(t *testing.T, testCases []sparkJarTestCase, testRunner func(t *testing.T, runtimeVersion string)) {
+	t.Helper()
+
+	testCanRun := make(map[string]bool)
+	atLeastOneCanRun := false
+	for _, tc := range testCases {
+		if testutil.HasJDK(t, context.Background(), tc.requiredJavaVersion) {
+			testCanRun[tc.name] = true
+			atLeastOneCanRun = true
+			continue
+		}
+		testCanRun[tc.name] = false
+	}
+
+	if !atLeastOneCanRun {
+		t.Fatal("At least one test is required to pass. All tests were skipped because no compatible Java version was found.")
+	}
+
+	// Run the tests that can run
+	for _, tc := range testCases {
+		tc := tc // Capture range variable for goroutine
+		canRun := testCanRun[tc.name]
+
+		t.Run(tc.name, func(t *testing.T) {
+			if !canRun {
+				t.Skipf("Skipping %s: requires Java version %v", tc.name, tc.requiredJavaVersion)
+				return
+			}
+
+			t.Parallel()
+			testRunner(t, tc.runtimeVersion)
+		})
+	}
+}
+
 func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) {
 	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
 	tmpDir := t.TempDir()
@@ -54,46 +98,60 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
 }
 
 func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
-	testutil.RequireJDK(t, context.Background(), "1.8.0")
-
 	// Failure on earlier DBR versions:
 	//
 	//   JAR installation from Volumes is supported on UC Clusters with DBR >= 13.3.
 	//   Denied library is Jar(/Volumes/main/test-schema-ldgaklhcahlg/my-volume/.internal/PrintArgs.jar)
 	//
-	versions := []string{
-		"13.3.x-scala2.12", // 13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12)
-		"14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
-		"15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12)
-	}
-
-	for _, version := range versions {
-		t.Run(version, func(t *testing.T) {
-			t.Parallel()
-			runSparkJarTestFromVolume(t, version)
-		})
+	testCases := []sparkJarTestCase{
+		{
+			name:                "Databricks Runtime 13.3 LTS",
+			runtimeVersion:      "13.3.x-scala2.12", // 13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12)
+			requiredJavaVersion: "1.8.0",            // Only JDK 8 is supported
+		},
+		{
+			name:                "Databricks Runtime 14.3 LTS",
+			runtimeVersion:      "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
+			requiredJavaVersion: "1.8.0",            // Only JDK 8 is supported
+		},
+		{
+			name:                "Databricks Runtime 15.4 LTS",
+			runtimeVersion:      "15.4.x-scala2.12", // 15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12)
+			requiredJavaVersion: "1.8.0",            // Only JDK 8 is supported
+		},
+		{
+			name:                "Databricks Runtime 16.2",
+			runtimeVersion:      "16.2.x-scala2.12", // 16.2 (includes Apache Spark 3.5.2, Scala 2.12)
+			requiredJavaVersion: "11.0",             // Can run jars compiled by Java 11
+		},
 	}
+	runSparkJarTests(t, testCases, runSparkJarTestFromVolume)
 }
 
 func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
-	testutil.RequireJDK(t, context.Background(), "1.8.0")
-
 	// Failure on earlier DBR versions:
 	//
 	//   Library from /Workspace is not allowed on this cluster.
 	//   Please switch to using DBR 14.1+ No Isolation Shared or DBR 13.1+ Shared cluster or 13.2+ Assigned cluster to use /Workspace libraries.
 	//
-	versions := []string{
-		"14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
-		"15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12)
-	}
-
-	for _, version := range versions {
-		t.Run(version, func(t *testing.T) {
-			t.Parallel()
-			runSparkJarTestFromWorkspace(t, version)
-		})
+	testCases := []sparkJarTestCase{
+		{
+			name:                "Databricks Runtime 14.3 LTS",
+			runtimeVersion:      "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
+			requiredJavaVersion: "1.8.0",            // Only JDK 8 is supported
+		},
+		{
+			name:                "Databricks Runtime 15.4 LTS",
+			runtimeVersion:      "15.4.x-scala2.12", // 15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12)
+			requiredJavaVersion: "1.8.0",            // Only JDK 8 is supported
+		},
+		{
+			name:                "Databricks Runtime 16.2",
+			runtimeVersion:      "16.2.x-scala2.12", // 16.2 (includes Apache Spark 3.5.2, Scala 2.12)
+			requiredJavaVersion: "11.0",             // Can run jars compiled by Java 11
+		},
 	}
+	runSparkJarTests(t, testCases, runSparkJarTestFromWorkspace)
}

View File

@@ -1,23 +1,44 @@
 package testutil
 
 import (
-	"bytes"
 	"context"
 	"os/exec"
 	"strings"
-
-	"github.com/stretchr/testify/require"
 )
 
-func RequireJDK(t TestingT, ctx context.Context, version string) {
-	var stderr bytes.Buffer
+// HasJDK checks if the specified Java version is available in the system.
+// It returns true if the required JDK version is present, false otherwise.
+// This is a non-failing variant of RequireJDK.
+//
+// Example output of `java -version` in eclipse-temurin:8:
+// openjdk version "1.8.0_442"
+// OpenJDK Runtime Environment (Temurin)(build 1.8.0_442-b06)
+// OpenJDK 64-Bit Server VM (Temurin)(build 25.442-b06, mixed mode)
+//
+// Example output of `java -version` in java11 (homebrew):
+// openjdk version "11.0.26" 2025-01-21
+// OpenJDK Runtime Environment Homebrew (build 11.0.26+0)
+// OpenJDK 64-Bit Server VM Homebrew (build 11.0.26+0, mixed mode)
+func HasJDK(t TestingT, ctx context.Context, version string) bool {
+	t.Helper()
 
-	cmd := exec.Command("javac", "-version")
-	cmd.Stderr = &stderr
-	err := cmd.Run()
-	require.NoError(t, err, "Unable to run javac -version")
+	// Try to execute "java -version" command
+	cmd := exec.CommandContext(ctx, "java", "-version")
+	output, err := cmd.CombinedOutput()
+	if err != nil {
+		t.Logf("Failed to execute java -version: %v", err)
+		return false
+	}
 
-	// Get the first line of the output
-	line := strings.Split(stderr.String(), "\n")[0]
-	require.Contains(t, line, version, "Expected JDK version %s, got %s", version, line)
+	javaVersionOutput := string(output)
+
+	// Check if the output contains the expected version
+	expectedVersionString := "version \"" + version
+	if strings.Contains(javaVersionOutput, expectedVersionString) {
+		t.Logf("Detected JDK version %s", version)
+		return true
+	}
+
+	t.Logf("Required JDK version %s not found, instead got: %s", version, javaVersionOutput)
+	return false
}
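
A minimal sketch of using HasJDK as a non-failing gate in a test; the test name is hypothetical, and the version string is matched against the `java -version` prefix check above:

// Hypothetical test that gates on JDK 11 without failing when it is absent.
func TestNeedsJava11(t *testing.T) {
	if !testutil.HasJDK(t, context.Background(), "11.0") {
		t.Skip("JDK 11 not available on this machine")
	}
	// ... test body that runs artifacts compiled for Java 11 ...
}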

View File

@@ -38,3 +38,17 @@ func EnvVars() []string {
 	return out
 }
+
+func GetEnvFor(name string) (string, bool) {
+	for _, attr := range config.ConfigAttributes {
+		if attr.Name != name {
+			continue
+		}
+		if len(attr.EnvVars) == 0 {
+			return "", false
+		}
+		return attr.EnvVars[0], true
+	}
+
+	return "", false
+}

View File

@@ -77,3 +77,42 @@ func TestAuthEnvVars(t *testing.T) {
 	out := EnvVars()
 	assert.Equal(t, expected, out)
 }
+
+func TestGetEnvFor(t *testing.T) {
+	tcases := []struct {
+		name     string
+		expected string
+	}{
+		// Generic attributes.
+		{"host", "DATABRICKS_HOST"},
+		{"profile", "DATABRICKS_CONFIG_PROFILE"},
+		{"auth_type", "DATABRICKS_AUTH_TYPE"},
+		{"metadata_service_url", "DATABRICKS_METADATA_SERVICE_URL"},
+
+		// OAuth specific attributes.
+		{"client_id", "DATABRICKS_CLIENT_ID"},
+
+		// Google specific attributes.
+		{"google_service_account", "DATABRICKS_GOOGLE_SERVICE_ACCOUNT"},
+
+		// Azure specific attributes.
+		{"azure_workspace_resource_id", "DATABRICKS_AZURE_RESOURCE_ID"},
+		{"azure_use_msi", "ARM_USE_MSI"},
+		{"azure_client_id", "ARM_CLIENT_ID"},
+		{"azure_tenant_id", "ARM_TENANT_ID"},
+		{"azure_environment", "ARM_ENVIRONMENT"},
+		{"azure_login_app_id", "DATABRICKS_AZURE_LOGIN_APP_ID"},
+	}
+
+	for _, tcase := range tcases {
+		t.Run(tcase.name, func(t *testing.T) {
+			out, ok := GetEnvFor(tcase.name)
+			assert.True(t, ok)
+			assert.Equal(t, tcase.expected, out)
+		})
+	}
+
+	out, ok := GetEnvFor("notfound")
+	assert.False(t, ok)
+	assert.Empty(t, out)
+}

View File

@@ -76,6 +76,10 @@ func IsPureVariableReference(s string) bool {
 	return len(s) > 0 && re.FindString(s) == s
 }
 
+func ContainsVariableReference(s string) bool {
+	return re.MatchString(s)
+}
+
 // If s is a pure variable reference, this function returns the corresponding
 // dyn.Path. Otherwise, it returns false.
 func PureReferenceToPath(s string) (dyn.Path, bool) {
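
The distinction between the two predicates: a pure reference must be the entire string, while ContainsVariableReference also matches references embedded in other text. A small sketch, assuming these helpers live in the same package (referred to here as dynvar) and the ${...} reference syntax used in bundle configuration:

// Sketch: pure vs. embedded variable references.
_ = dynvar.IsPureVariableReference("${var.cluster}")          // true: the whole string is one reference
_ = dynvar.IsPureVariableReference("prefix-${var.cluster}")   // false: extra text around the reference
_ = dynvar.ContainsVariableReference("prefix-${var.cluster}") // true: a reference occurs somewhere
_ = dynvar.ContainsVariableReference("plain string")          // false: no reference at all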

View File

@@ -78,7 +78,7 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri
 	b, err := bundle.Load(ctx, filepath.Join(tempDir, "my_project"))
 	require.NoError(t, err)
 
-	diags := bundle.Apply(ctx, b, phases.LoadNamedTarget(target))
+	diags := phases.LoadNamedTarget(ctx, b, target)
 	require.NoError(t, diags.Error())
 
 	// Apply initialize / validation mutators
@@ -93,14 +93,12 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri
 	b.Tagging = tags.ForCloud(w.Config)
 	b.WorkspaceClient()
 
-	diags = bundle.Apply(ctx, b, bundle.Seq(
-		phases.Initialize(),
-	))
+	diags = phases.Initialize(ctx, b)
 	require.NoError(t, diags.Error())
 
 	// Apply build mutator
 	if build {
-		diags = bundle.Apply(ctx, b, phases.Build())
+		diags = phases.Build(ctx, b)
 		require.NoError(t, diags.Error())
 	}
 }

View File

@@ -2,6 +2,7 @@ package testserver
 
 import (
 	"encoding/json"
+	"fmt"
 	"io"
 	"net/http"
 	"net/http/httptest"
@@ -191,24 +192,23 @@ func New(t testutil.TestingT) *Server {
 	// Set up the not found handler as fallback
 	router.NotFoundHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
 		pattern := r.Method + " " + r.URL.Path
+		bodyBytes, err := io.ReadAll(r.Body)
+		var body string
+		if err != nil {
+			body = fmt.Sprintf("failed to read the body: %s", err)
+		} else {
+			body = fmt.Sprintf("[%d bytes] %s", len(bodyBytes), bodyBytes)
+		}
 
-		t.Errorf(`
-
-----------------------------------------
-No stub found for pattern: %s
-
-To stub a response for this request, you can add
-the following to test.toml:
+		t.Errorf(`No handler for URL: %s
+Body: %s
+
+For acceptance tests, add this to test.toml:
 [[Server]]
 Pattern = %q
-Response.Body = '''
-<response body here>
-'''
-Response.StatusCode = <response status-code here>
-----------------------------------------
-`, pattern, pattern)
+Response.Body = '<response body here>'
+# Response.StatusCode = <response code if not 200>
+`, r.URL, body, pattern)
 
 		w.Header().Set("Content-Type", "application/json")
 		w.WriteHeader(http.StatusNotImplemented)