diff --git a/CHANGELOG.md b/CHANGELOG.md
index 23c696ab7..fad9ce620 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,29 @@
 # Version changelog
 
+## [Release] Release v0.242.0
+
+Notable changes:
+Starting with this version, the CLI does not load bundle auth information when a CLI command is executed inside a bundle directory with a profile explicitly provided via the `-p` flag.
+For more details, see the related GitHub issue: https://github.com/databricks/cli/issues/1358
+
+CLI:
+ * Do not load host from bundle for CLI commands when profile flag is used ([#2335](https://github.com/databricks/cli/pull/2335)).
+ * Fixed accessing required path parameters in CLI generation when the --json flag is used ([#2373](https://github.com/databricks/cli/pull/2373)).
+
+Bundles:
+ * Provide instructions for testing in the default-python template ([#2355](https://github.com/databricks/cli/pull/2355)).
+ * Remove `run_as` from the built-in templates ([#2044](https://github.com/databricks/cli/pull/2044)).
+ * Change warning about incomplete permissions section into a recommendation ([#2043](https://github.com/databricks/cli/pull/2043)).
+ * Refine `mode: production` diagnostic output ([#2236](https://github.com/databricks/cli/pull/2236)).
+ * Support serverless mode in default-python template (explicit prompt) ([#2377](https://github.com/databricks/cli/pull/2377)).
+ * Set default data_security_mode to "SINGLE_USER" in bundle templates ([#2372](https://github.com/databricks/cli/pull/2372)).
+ * Fixed spark version check for clusters defined in the same bundle ([#2374](https://github.com/databricks/cli/pull/2374)).
+
+API Changes:
+ * Added `databricks genie get-message-query-result-by-attachment` command.
+
+OpenAPI commit 99f644e72261ef5ecf8d74db20f4b7a1e09723cc (2025-02-11)
+
 ## [Release] Release v0.241.2
 
 This is a bugfix release to address an issue where jobs with tasks with a
diff --git a/acceptance/bundle/artifacts/same_name_libraries/databricks.yml b/acceptance/bundle/artifacts/same_name_libraries/databricks.yml
new file mode 100644
index 000000000..d58674a64
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/databricks.yml
@@ -0,0 +1,54 @@
+bundle:
+  name: same_name_libraries
+
+variables:
+  cluster:
+    default:
+      spark_version: 15.4.x-scala2.12
+      node_type_id: i3.xlarge
+      data_security_mode: SINGLE_USER
+      num_workers: 0
+      spark_conf:
+        spark.master: "local[*, 4]"
+        spark.databricks.cluster.profile: singleNode
+      custom_tags:
+        ResourceClass: SingleNode
+
+artifacts:
+  whl1:
+    type: whl
+    path: ./whl1
+  whl2:
+    type: whl
+    path: ./whl2
+
+resources:
+  jobs:
+    test:
+      name: "test"
+      tasks:
+        - task_key: task1
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_default_python
+          libraries:
+            - whl: ./whl1/dist/*.whl
+            - pypi:
+                package: test_package
+        - task_key: task2
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_default_python
+          libraries:
+            - whl: ./whl2/dist/*.whl
+            - maven:
+                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
+        - task_key: task3
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_default_python
+          libraries:
+            - whl: ./whl1/dist/*.whl
diff --git a/acceptance/bundle/artifacts/same_name_libraries/output.txt b/acceptance/bundle/artifacts/same_name_libraries/output.txt
new file mode 100644
index 000000000..ee6c9d566
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/output.txt
@@ -0,0 +1,14 @@
+
+>>> errcode [CLI] bundle deploy
+Building whl1...
+Building whl2...
+Error: Duplicate local library names: my_default_python-0.0.1-py3-none-any.whl
+  at resources.jobs.test.tasks[0].libraries[0].whl
+     resources.jobs.test.tasks[1].libraries[0].whl
+  in databricks.yml:36:15
+     databricks.yml:45:15
+
+Local library names must be unique but found libraries with the same name: whl1/dist/my_default_python-0.0.1-py3-none-any.whl, whl2/dist/my_default_python-0.0.1-py3-none-any.whl
+
+
+Exit code: 1
diff --git a/acceptance/bundle/artifacts/same_name_libraries/script b/acceptance/bundle/artifacts/same_name_libraries/script
new file mode 100644
index 000000000..6c899df07
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/script
@@ -0,0 +1,2 @@
+trace errcode $CLI bundle deploy
+rm -rf whl1 whl2
diff --git a/acceptance/bundle/artifacts/same_name_libraries/test.toml b/acceptance/bundle/artifacts/same_name_libraries/test.toml
new file mode 100644
index 000000000..a298217f2
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/test.toml
@@ -0,0 +1,3 @@
+[[Repls]]
+Old = '\\'
+New = '/'
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl1/setup.py b/acceptance/bundle/artifacts/same_name_libraries/whl1/setup.py
new file mode 100644
index 000000000..ddd81295e
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl1/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup, find_packages
+
+import sys
+
+sys.path.append("./src")
+
+import my_default_python
+
+setup(
+    name="my_default_python",
+    version=my_default_python.__version__,
+    url="https://databricks.com",
+    author="[USERNAME]",
+    description="wheel file based on my_default_python/src",
+    packages=find_packages(where="./src"),
+    package_dir={"": "src"},
+    entry_points={
+        "packages": [
+            "main=my_default_python.main:main",
+        ],
+    },
+    install_requires=[
+        # Dependencies in case the output wheel file is used as a library dependency.
+        # For defining dependencies, when this package is used in Databricks, see:
+        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+        "setuptools"
+    ],
+)
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/__init__.py b/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/__init__.py
new file mode 100644
index 000000000..f102a9cad
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.0.1"
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/main.py b/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/main.py
new file mode 100644
index 000000000..11b15b1a4
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl1/src/my_default_python/main.py
@@ -0,0 +1 @@
+print("hello")
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl2/setup.py b/acceptance/bundle/artifacts/same_name_libraries/whl2/setup.py
new file mode 100644
index 000000000..ddd81295e
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl2/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup, find_packages
+
+import sys
+
+sys.path.append("./src")
+
+import my_default_python
+
+setup(
+    name="my_default_python",
+    version=my_default_python.__version__,
+    url="https://databricks.com",
+    author="[USERNAME]",
+    description="wheel file based on my_default_python/src",
+    packages=find_packages(where="./src"),
+    package_dir={"": "src"},
+    entry_points={
+        "packages": [
+            "main=my_default_python.main:main",
+        ],
+    },
+    install_requires=[
+        # Dependencies in case the output wheel file is used as a library dependency.
+        # For defining dependencies, when this package is used in Databricks, see:
+        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+        "setuptools"
+    ],
+)
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/__init__.py b/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/__init__.py
new file mode 100644
index 000000000..f102a9cad
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.0.1"
diff --git a/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/main.py b/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/main.py
new file mode 100644
index 000000000..11b15b1a4
--- /dev/null
+++ b/acceptance/bundle/artifacts/same_name_libraries/whl2/src/my_default_python/main.py
@@ -0,0 +1 @@
+print("hello")
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/databricks.yml b/acceptance/bundle/artifacts/unique_name_libraries/databricks.yml
new file mode 100644
index 000000000..dd13c1918
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/databricks.yml
@@ -0,0 +1,56 @@
+bundle:
+  name: unique_name_libraries
+
+variables:
+  cluster:
+    default:
+      spark_version: 15.4.x-scala2.12
+      node_type_id: i3.xlarge
+      data_security_mode: SINGLE_USER
+      num_workers: 0
+      spark_conf:
+        spark.master: "local[*, 4]"
+        spark.databricks.cluster.profile: singleNode
+      custom_tags:
+        ResourceClass: SingleNode
+
+artifacts:
+  whl1:
+    type: whl
+    path: ./whl1
+  whl2:
+    type: whl
+    path: ./whl2
+
+resources:
+  jobs:
+    test:
+      name: "test"
+      tasks:
+        - task_key: task1
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_package
+          libraries:
+            - whl: ./whl1/dist/*.whl
+            - whl: cowsay
+            - pypi:
+                package: test_package
+        - task_key: task2
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_other_package
+          libraries:
+            - whl: ./whl2/dist/*.whl
+            - whl: cowsay
+            - maven:
+                coordinates: org.apache.spark:spark-sql_2.12:3.1.1
+        - task_key: task3
+          new_cluster: ${var.cluster}
+          python_wheel_task:
+            entry_point: main
+            package_name: my_default_python
+          libraries:
+            - whl: ./whl1/dist/*.whl
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/output.txt b/acceptance/bundle/artifacts/unique_name_libraries/output.txt
new file mode 100644
index 000000000..ecc7bf57b
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/output.txt
@@ -0,0 +1,10 @@
+
+>>> errcode [CLI] bundle deploy
+Building whl1...
+Building whl2...
+Uploading [package name]
+Uploading [package name]
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/unique_name_libraries/default/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/script b/acceptance/bundle/artifacts/unique_name_libraries/script
new file mode 100644
index 000000000..6c899df07
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/script
@@ -0,0 +1,2 @@
+trace errcode $CLI bundle deploy
+rm -rf whl1 whl2
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/test.toml b/acceptance/bundle/artifacts/unique_name_libraries/test.toml
new file mode 100644
index 000000000..280338bd6
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/test.toml
@@ -0,0 +1,6 @@
+Cloud = false
+
+# The order in which files are uploaded can be different, so we just replace the name
+[[Repls]]
+Old="Uploading .*-0.0.1-py3-none-any.whl..."
+New="Uploading [package name]"
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl1/setup.py b/acceptance/bundle/artifacts/unique_name_libraries/whl1/setup.py
new file mode 100644
index 000000000..ca85def32
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl1/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup, find_packages
+
+import sys
+
+sys.path.append("./src")
+
+import my_package
+
+setup(
+    name="my_package",
+    version=my_package.__version__,
+    url="https://databricks.com",
+    author="[USERNAME]",
+    description="wheel file based on my_package/src",
+    packages=find_packages(where="./src"),
+    package_dir={"": "src"},
+    entry_points={
+        "packages": [
+            "main=my_package.main:main",
+        ],
+    },
+    install_requires=[
+        # Dependencies in case the output wheel file is used as a library dependency.
+        # For defining dependencies, when this package is used in Databricks, see:
+        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+        "setuptools"
+    ],
+)
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/__init__.py b/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/__init__.py
new file mode 100644
index 000000000..f102a9cad
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.0.1"
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/main.py b/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/main.py
new file mode 100644
index 000000000..11b15b1a4
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl1/src/my_package/main.py
@@ -0,0 +1 @@
+print("hello")
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl2/setup.py b/acceptance/bundle/artifacts/unique_name_libraries/whl2/setup.py
new file mode 100644
index 000000000..5e5b34788
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl2/setup.py
@@ -0,0 +1,28 @@
+from setuptools import setup, find_packages
+
+import sys
+
+sys.path.append("./src")
+
+import my_other_package
+
+setup(
+    name="my_other_package",
+    version=my_other_package.__version__,
+    url="https://databricks.com",
+    author="[USERNAME]",
+    description="wheel file based on my_other_package/src",
+    packages=find_packages(where="./src"),
+    package_dir={"": "src"},
+    entry_points={
+        "packages": [
+            "main=my_other_package.main:main",
+        ],
+    },
+    install_requires=[
+        # Dependencies in case the output wheel file is used as a library dependency.
+        # For defining dependencies, when this package is used in Databricks, see:
+        # https://docs.databricks.com/dev-tools/bundles/library-dependencies.html
+        "setuptools"
+    ],
+)
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/__init__.py b/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/__init__.py
new file mode 100644
index 000000000..f102a9cad
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/__init__.py
@@ -0,0 +1 @@
+__version__ = "0.0.1"
diff --git a/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/main.py b/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/main.py
new file mode 100644
index 000000000..11b15b1a4
--- /dev/null
+++ b/acceptance/bundle/artifacts/unique_name_libraries/whl2/src/my_other_package/main.py
@@ -0,0 +1 @@
+print("hello")
diff --git a/acceptance/bundle/debug/out.stderr.txt b/acceptance/bundle/debug/out.stderr.txt
index e5867e008..147b33a45 100644
--- a/acceptance/bundle/debug/out.stderr.txt
+++ b/acceptance/bundle/debug/out.stderr.txt
@@ -1,81 +1,77 @@
 10:07:59 Info: start pid=12345 version=[DEV_VERSION] args="[CLI], bundle, validate, --debug"
 10:07:59 Debug: Found bundle root at [TMPDIR] (file [TMPDIR]/databricks.yml) pid=12345
-10:07:59 Debug: Apply pid=12345 mutator=load
-10:07:59 Info: Phase: load pid=12345 mutator=load
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EntryPoint
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=scripts.preinit
-10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=load mutator=seq mutator=scripts.preinit
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ProcessRootIncludes mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=VerifyCliVersion
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=EnvironmentsToTargets
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=ComputeIdToClusterId
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=InitializeVariables
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=DefineDefaultTarget(default)
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=PythonMutator(load)
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=validate:unique_resource_keys
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget
-10:07:59 Debug: Apply pid=12345 mutator=load mutator=seq mutator=SelectDefaultTarget mutator=SelectTarget(default)
+10:07:59 Info: Phase: load pid=12345
+10:07:59 Debug: Apply pid=12345 mutator=EntryPoint
+10:07:59 Debug: Apply pid=12345 mutator=scripts.preinit
+10:07:59 Debug: No script defined for preinit, skipping pid=12345 mutator=scripts.preinit
+10:07:59 Debug: Apply pid=12345 mutator=ProcessRootIncludes
+10:07:59 Debug: Apply pid=12345 mutator=VerifyCliVersion
+10:07:59 Debug: Apply pid=12345 mutator=EnvironmentsToTargets
+10:07:59 Debug: Apply pid=12345 mutator=ComputeIdToClusterId
+10:07:59 Debug: Apply pid=12345 mutator=InitializeVariables
+10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultTarget(default)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load)
+10:07:59 Debug: Apply pid=12345 mutator=validate:unique_resource_keys
+10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget
+10:07:59 Debug: Apply pid=12345 mutator=SelectDefaultTarget mutator=SelectTarget(default)
 10:07:59 Debug: Apply pid=12345 mutator=
-10:07:59 Debug: Apply pid=12345 mutator=initialize
-10:07:59 Info: Phase: initialize pid=12345 mutator=initialize
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=validate:AllResourcesHaveValues
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteSyncPaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncDefaultPath
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SyncInferRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser
+10:07:59 Info: Phase: initialize pid=12345
+10:07:59 Debug: Apply pid=12345 mutator=validate:AllResourcesHaveValues
+10:07:59 Debug: Apply pid=12345 mutator=validate:interpolation_in_auth_config
+10:07:59 Debug: Apply pid=12345 mutator=RewriteSyncPaths
+10:07:59 Debug: Apply pid=12345 mutator=SyncDefaultPath
+10:07:59 Debug: Apply pid=12345 mutator=SyncInferRoot
+10:07:59 Debug: Apply pid=12345 mutator=PopulateCurrentUser
 10:07:59 Debug: GET /api/2.0/preview/scim/v2/Me
 < HTTP/1.1 200 OK
 < {
 <   "id": "[USERID]",
 <   "userName": "[USERNAME]"
-< } pid=12345 mutator=initialize mutator=seq mutator=PopulateCurrentUser sdk=true
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=LoadGitDetails
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplySourceLinkedDeploymentPreset
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefineDefaultWorkspaceRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandWorkspaceRoot
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultWorkspacePaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PrependWorkspacePrefix
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=RewriteWorkspacePrefix
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetVariables
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(init)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(load_resources)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonMutator(apply_mutators)
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveResourceReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ResolveVariableReferences
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobClusters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobParameters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeJobTasks
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergePipelineClusters
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=MergeApps
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CaptureSchemaDependency
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=CheckPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=SetRunAs
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=OverrideCompute
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureDashboardDefaults
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureVolumeDefaults
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ProcessTargetMode
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyPresets
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=DefaultQueueing
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ExpandPipelineGlobPaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ConfigureWSFS
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=TranslatePaths
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=PythonWrapperWarning
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=apps.Validate
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ValidateSharedRootPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=ApplyBundlePermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=FilterCurrentUserFromPermissions
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotateJobs
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=metadata.AnnotatePipelines
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=initialize mutator=seq mutator=terraform.Initialize
-10:07:59 Debug: Apply pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
-10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=initialize mutator=seq mutator=scripts.postinit
+< } pid=12345 mutator=PopulateCurrentUser sdk=true
+10:07:59 Debug: Apply pid=12345 mutator=LoadGitDetails
+10:07:59 Debug: Apply pid=12345 mutator=ApplySourceLinkedDeploymentPreset
+10:07:59 Debug: Apply pid=12345 mutator=DefineDefaultWorkspaceRoot
+10:07:59 Debug: Apply pid=12345 mutator=ExpandWorkspaceRoot
+10:07:59 Debug: Apply pid=12345 mutator=DefaultWorkspacePaths
+10:07:59 Debug: Apply pid=12345 mutator=PrependWorkspacePrefix
+10:07:59 Debug: Apply pid=12345 mutator=RewriteWorkspacePrefix
+10:07:59 Debug: Apply pid=12345 mutator=SetVariables
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(init)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(load_resources)
+10:07:59 Debug: Apply pid=12345 mutator=PythonMutator(apply_mutators)
+10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
+10:07:59 Debug: Apply pid=12345 mutator=ResolveResourceReferences
+10:07:59 Debug: Apply pid=12345 mutator=ResolveVariableReferences
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobClusters
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobParameters
+10:07:59 Debug: Apply pid=12345 mutator=MergeJobTasks
+10:07:59 Debug: Apply pid=12345 mutator=MergePipelineClusters
+10:07:59 Debug: Apply pid=12345 mutator=MergeApps
+10:07:59 Debug: Apply pid=12345 mutator=CaptureSchemaDependency
+10:07:59 Debug: Apply pid=12345 mutator=CheckPermissions
+10:07:59 Debug: Apply pid=12345 mutator=SetRunAs
+10:07:59 Debug: Apply pid=12345 mutator=OverrideCompute
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureDashboardDefaults
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureVolumeDefaults
+10:07:59 Debug: Apply pid=12345 mutator=ProcessTargetMode
+10:07:59 Debug: Apply pid=12345 mutator=ApplyPresets
+10:07:59 Debug: Apply pid=12345 mutator=DefaultQueueing
+10:07:59 Debug: Apply pid=12345 mutator=ExpandPipelineGlobPaths
+10:07:59 Debug: Apply pid=12345 mutator=ConfigureWSFS
+10:07:59 Debug: Apply pid=12345 mutator=TranslatePaths
+10:07:59 Debug: Apply pid=12345 mutator=PythonWrapperWarning
+10:07:59 Debug: Apply pid=12345 mutator=apps.Validate
+10:07:59 Debug: Apply pid=12345 mutator=ValidateSharedRootPermissions
+10:07:59 Debug: Apply pid=12345 mutator=ApplyBundlePermissions
+10:07:59 Debug: Apply pid=12345 mutator=FilterCurrentUserFromPermissions
+10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotateJobs
+10:07:59 Debug: Apply pid=12345 mutator=metadata.AnnotatePipelines
+10:07:59 Debug: Apply pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Using Terraform from DATABRICKS_TF_EXEC_PATH at [TERRAFORM] pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Using Terraform CLI config from DATABRICKS_TF_CLI_CONFIG_FILE at [DATABRICKS_TF_CLI_CONFIG_FILE] pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Environment variables for Terraform: ...redacted... pid=12345 mutator=terraform.Initialize
+10:07:59 Debug: Apply pid=12345 mutator=scripts.postinit
+10:07:59 Debug: No script defined for postinit, skipping pid=12345 mutator=scripts.postinit
 10:07:59 Debug: Apply pid=12345 mutator=validate
 10:07:59 Debug: GET /api/2.0/workspace/get-status?path=/Workspace/Users/[USERNAME]/.bundle/debug/default/files
 < HTTP/1.1 404 Not Found
diff --git a/acceptance/bundle/includes/include_outside_root/a.yml b/acceptance/bundle/includes/include_outside_root/a.yml
new file mode 100644
index 000000000..b4addb99b
--- /dev/null
+++ b/acceptance/bundle/includes/include_outside_root/a.yml
@@ -0,0 +1,4 @@
+include:
+  - b.yml
+  - c.yml
+
diff --git a/acceptance/bundle/includes/include_outside_root/databricks.yml b/acceptance/bundle/includes/include_outside_root/databricks.yml
new file mode 100644
index 000000000..6addb7d2f
--- /dev/null
+++ b/acceptance/bundle/includes/include_outside_root/databricks.yml
@@ -0,0 +1,5 @@
+bundle:
+  name: include_outside_root
+
+include:
+  - a.yml
diff --git a/acceptance/bundle/includes/include_outside_root/output.txt b/acceptance/bundle/includes/include_outside_root/output.txt
new file mode 100644
index 000000000..720c3a826
--- /dev/null
+++ b/acceptance/bundle/includes/include_outside_root/output.txt
@@ -0,0 +1,16 @@
+Warning: Include section is defined outside root file
+  at include
+  in a.yml:2:3
+
+The include section is defined in a file that is not the root file.
+These values will be ignored because only the includes defined in
+the bundle root file (that is databricks.yml or databricks.yaml)
+are loaded.
+
+Name: include_outside_root
+Target: default
+Workspace:
+  User: [USERNAME]
+  Path: /Workspace/Users/[USERNAME]/.bundle/include_outside_root/default
+
+Found 1 warning
diff --git a/acceptance/bundle/includes/include_outside_root/script b/acceptance/bundle/includes/include_outside_root/script
new file mode 100644
index 000000000..72555b332
--- /dev/null
+++ b/acceptance/bundle/includes/include_outside_root/script
@@ -0,0 +1 @@
+$CLI bundle validate
diff --git a/acceptance/bundle/variables/host/output.txt b/acceptance/bundle/variables/host/output.txt
index df0a4527a..eef4a08bf 100644
--- a/acceptance/bundle/variables/host/output.txt
+++ b/acceptance/bundle/variables/host/output.txt
@@ -1,5 +1,12 @@
 
 >>> errcode [CLI] bundle validate
+Warning: Variable interpolation is not supported for fields that configure authentication
+  at workspace.host
+  in databricks.yml:10:9
+
+Interpolation is not supported for the field workspace.host. Please set
+the DATABRICKS_HOST environment variable if you wish to configure this field at runtime.
+
 Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
 
 {
@@ -27,6 +34,13 @@ Error: failed during request visitor: parse "https://${var.host}": invalid chara
 
 Exit code: 1
 
 >>> errcode [CLI] bundle validate
+Warning: Variable interpolation is not supported for fields that configure authentication
+  at workspace.host
+  in databricks.yml:10:9
+
+Interpolation is not supported for the field workspace.host. Please set
+the DATABRICKS_HOST environment variable if you wish to configure this field at runtime.
+
 Error: failed during request visitor: parse "https://${var.host}": invalid character "{" in host name
 
 Name: host
@@ -34,6 +48,6 @@ Target: default
 Workspace:
   Host: ${var.host}
 
-Found 1 error
+Found 1 error and 1 warning
 
 Exit code: 1
diff --git a/acceptance/panic/output.txt b/acceptance/panic/output.txt
new file mode 100644
index 000000000..9dca41c23
--- /dev/null
+++ b/acceptance/panic/output.txt
@@ -0,0 +1,15 @@
+
+>>> [CLI] selftest panic
+The Databricks CLI unexpectedly had a fatal error.
+Please report this issue to Databricks in the form of a GitHub issue at:
+https://github.com/databricks/cli
+
+CLI Version: [DEV_VERSION]
+
+Panic Payload: the databricks selftest panic command always panics
+
+Stack Trace:
+goroutine 1 [running]:
+runtime/debug.Stack()
+
+Exit code: 1
diff --git a/acceptance/panic/script b/acceptance/panic/script
new file mode 100644
index 000000000..a02466923
--- /dev/null
+++ b/acceptance/panic/script
@@ -0,0 +1,5 @@
+# We filter anything after runtime/debug.Stack() in the output because the stack
+# trace itself is hard to perform replacements on, since it can depend on the
+# exact setup of where the modules are installed in your Go setup, memory addresses
+# at runtime, etc.
+trace $CLI selftest panic 2>&1 | sed '/runtime\/debug\.Stack()/q'
diff --git a/acceptance/panic/test.toml b/acceptance/panic/test.toml
new file mode 100644
index 000000000..e69de29bb
diff --git a/bundle/apps/upload_config_test.go b/bundle/apps/upload_config_test.go
index a1a6b3afb..1087508f2 100644
--- a/bundle/apps/upload_config_test.go
+++ b/bundle/apps/upload_config_test.go
@@ -70,6 +70,6 @@ env:
 
 	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(root, "databricks.yml")}})
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), &u))
+	diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), &u)
 	require.NoError(t, diags.Error())
 }
diff --git a/bundle/apps/validate_test.go b/bundle/apps/validate_test.go
index 6c3a88191..11270198e 100644
--- a/bundle/apps/validate_test.go
+++ b/bundle/apps/validate_test.go
@@ -51,7 +51,7 @@ func TestAppsValidate(t *testing.T) {
 
 	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
+	diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
 	require.Len(t, diags, 1)
 	require.Equal(t, "app.yml detected", diags[0].Summary)
 	require.Contains(t, diags[0].Detail, "app.yml and use 'config' property for app resource")
@@ -90,7 +90,7 @@ func TestAppsValidateSameSourcePath(t *testing.T) {
 
 	bundletest.SetLocation(b, ".", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(mutator.TranslatePaths(), Validate()))
+	diags := bundle.ApplySeq(context.Background(), b, mutator.TranslatePaths(), Validate())
 	require.Len(t, diags, 1)
 	require.Equal(t, "Duplicate app source code path", diags[0].Summary)
 	require.Contains(t, diags[0].Detail, "has the same source code path as app resource")
diff --git a/bundle/artifacts/all.go b/bundle/artifacts/all.go
index 768ccdfe3..b78e7c100 100644
--- a/bundle/artifacts/all.go
+++ b/bundle/artifacts/all.go
@@ -38,5 +38,5 @@ func (m *all) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 		}
 	}
 
-	return bundle.Apply(ctx, b, bundle.Seq(out...))
+	return bundle.ApplySeq(ctx, b, out...)
 }
diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go
index 0446135b6..94880bc2c 100644
--- a/bundle/artifacts/build.go
+++ b/bundle/artifacts/build.go
@@ -53,5 +53,5 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// if we do it before, any files that are generated by build command will
 	// not be included into artifact.Files and thus will not be uploaded.
 	mutators = append(mutators, &expandGlobs{name: m.name})
-	return bundle.Apply(ctx, b, bundle.Seq(mutators...))
+	return bundle.ApplySeq(ctx, b, mutators...)
 }
diff --git a/bundle/artifacts/expand_globs_test.go b/bundle/artifacts/expand_globs_test.go
index 264c52c50..7bf886330 100644
--- a/bundle/artifacts/expand_globs_test.go
+++ b/bundle/artifacts/expand_globs_test.go
@@ -39,11 +39,11 @@ func TestExpandGlobs_Nominal(t *testing.T) {
 	bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		// Run prepare first to make paths absolute.
 		&prepare{"test"},
 		&expandGlobs{"test"},
-	))
+	)
 	require.NoError(t, diags.Error())
 
 	// Assert that the expanded paths are correct.
@@ -80,11 +80,11 @@ func TestExpandGlobs_InvalidPattern(t *testing.T) {
 	bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		// Run prepare first to make paths absolute.
 		&prepare{"test"},
 		&expandGlobs{"test"},
-	))
+	)
 	assert.Len(t, diags, 4)
 	assert.Equal(t, filepath.Clean("a[.txt")+": syntax error in pattern", diags[0].Summary)
@@ -128,11 +128,11 @@ func TestExpandGlobs_NoMatches(t *testing.T) {
 	bundletest.SetLocation(b, "artifacts", []dyn.Location{{File: filepath.Join(tmpDir, "databricks.yml")}})
 
 	ctx := context.Background()
-	diags := bundle.Apply(ctx, b, bundle.Seq(
+	diags := bundle.ApplySeq(ctx, b,
 		// Run prepare first to make paths absolute.
 		&prepare{"test"},
 		&expandGlobs{"test"},
-	))
+	)
 	assert.Len(t, diags, 2)
 	assert.Equal(t, "c*.txt: no matching files", diags[0].Summary)
diff --git a/bundle/config/loader/process_include.go b/bundle/config/loader/process_include.go
index f82f5db1e..4f7dd47ca 100644
--- a/bundle/config/loader/process_include.go
+++ b/bundle/config/loader/process_include.go
@@ -161,6 +161,19 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
 		return diags
 	}
 
+	if len(this.Include) > 0 {
+		diags = diags.Append(diag.Diagnostic{
+			Severity: diag.Warning,
+			Summary:  "Include section is defined outside root file",
+			Detail: `The include section is defined in a file that is not the root file.
+These values will be ignored because only the includes defined in
+the bundle root file (that is databricks.yml or databricks.yaml)
+are loaded.`,
+			Locations: this.GetLocations("include"),
+			Paths:     []dyn.Path{dyn.MustPathFromString("include")},
+		})
+	}
+
 	err := b.Config.Merge(this)
 	if err != nil {
 		diags = diags.Extend(diag.FromErr(err))
diff --git a/bundle/config/loader/process_root_includes.go b/bundle/config/loader/process_root_includes.go
index 69e6dd4e4..1e1215b30 100644
--- a/bundle/config/loader/process_root_includes.go
+++ b/bundle/config/loader/process_root_includes.go
@@ -98,5 +98,5 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 	// Swap out the original includes list with the expanded globs.
 	b.Config.Include = files
 
-	return bundle.Apply(ctx, b, bundle.Seq(out...))
+	return bundle.ApplySeq(ctx, b, out...)
 }
diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go
index 5fd9f53e5..1e6d1f59d 100644
--- a/bundle/config/mutator/mutator.go
+++ b/bundle/config/mutator/mutator.go
@@ -1,16 +1,19 @@
 package mutator
 
 import (
+	"context"
+
 	"github.com/databricks/cli/bundle"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/loader"
 	pythonmutator "github.com/databricks/cli/bundle/config/mutator/python"
 	"github.com/databricks/cli/bundle/config/validate"
 	"github.com/databricks/cli/bundle/scripts"
+	"github.com/databricks/cli/libs/diag"
 )
 
-func DefaultMutators() []bundle.Mutator {
-	return []bundle.Mutator{
+func DefaultMutators(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	return bundle.ApplySeq(ctx, b,
 		loader.EntryPoint(),
 
 		// Execute preinit script before processing includes.
@@ -31,5 +34,5 @@ func DefaultMutators() []bundle.Mutator {
 		// Note: This mutator must run before the target overrides are merged.
 		// See the mutator for more details.
 		validate.UniqueResourceKeys(),
-	}
+	)
 }
diff --git a/bundle/config/mutator/prepend_workspace_prefix_test.go b/bundle/config/mutator/prepend_workspace_prefix_test.go
index 31393e6bd..d6741f868 100644
--- a/bundle/config/mutator/prepend_workspace_prefix_test.go
+++ b/bundle/config/mutator/prepend_workspace_prefix_test.go
@@ -80,7 +80,7 @@ func TestPrependWorkspaceForDefaultConfig(t *testing.T) {
 			},
 		},
 	}
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix()))
+	diags := bundle.ApplySeq(context.Background(), b, DefineDefaultWorkspaceRoot(), ExpandWorkspaceRoot(), DefineDefaultWorkspacePaths(), PrependWorkspacePrefix())
 	require.Empty(t, diags)
 	require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev", b.Config.Workspace.RootPath)
 	require.Equal(t, "/Workspace/Users/jane@doe.com/.bundle/test/dev/artifacts", b.Config.Workspace.ArtifactPath)
diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go
index eb542c66e..d63f3ec86 100644
--- a/bundle/config/mutator/process_target_mode_test.go
+++ b/bundle/config/mutator/process_target_mode_test.go
@@ -163,8 +163,7 @@ func mockBundle(mode config.Mode) *bundle.Bundle {
 
 func TestProcessTargetModeDevelopment(t *testing.T) {
 	b := mockBundle(config.Development)
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	// Job 1
@@ -224,8 +223,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAws(t *testing.T) {
 	})
 
 	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	// Assert that tag normalization took place.
@@ -239,8 +237,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForAzure(t *testing.T) {
 	})
 
 	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	// Assert that tag normalization took place (Azure allows more characters than AWS).
@@ -254,8 +251,7 @@ func TestProcessTargetModeDevelopmentTagNormalizationForGcp(t *testing.T) {
 	})
 
 	b.Config.Workspace.CurrentUser.ShortName = "Héllö wörld?!"
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	// Assert that tag normalization took place.
@@ -311,8 +307,7 @@ func TestValidateDevelopmentMode(t *testing.T) {
 
 func TestProcessTargetModeDefault(t *testing.T) {
 	b := mockBundle("")
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 	assert.Equal(t, "job1", b.Config.Resources.Jobs["job1"].Name)
 	assert.Equal(t, "pipeline1", b.Config.Resources.Pipelines["pipeline1"].Name)
@@ -429,8 +424,7 @@ func TestAllNonUcResourcesAreRenamed(t *testing.T) {
 		reflect.TypeOf(&resources.Volume{}),
 	}
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	resources := reflect.ValueOf(b.Config.Resources)
@@ -484,8 +478,7 @@ func TestPrefixAlreadySet(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.NamePrefix = "custom_lennart_deploy_"
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, "custom_lennart_deploy_job1", b.Config.Resources.Jobs["job1"].Name)
@@ -498,8 +491,7 @@ func TestTagsAlreadySet(t *testing.T) {
 		"dev":    "foo",
 	}
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, "tag", b.Config.Resources.Jobs["job1"].Tags["custom"])
@@ -510,8 +502,7 @@ func TestTagsNil(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.Tags = nil
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@@ -521,8 +512,7 @@ func TestTagsEmptySet(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.Tags = map[string]string{}
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, "lennart", b.Config.Resources.Jobs["job2"].Tags["dev"])
@@ -532,8 +522,7 @@ func TestJobsMaxConcurrentRunsAlreadySet(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.JobsMaxConcurrentRuns = 10
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, 10, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@@ -543,8 +532,7 @@ func TestJobsMaxConcurrentRunsDisabled(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.JobsMaxConcurrentRuns = 1
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(t, 1, b.Config.Resources.Jobs["job1"].MaxConcurrentRuns)
@@ -554,8 +542,7 @@ func TestTriggerPauseStatusWhenUnpaused(t *testing.T) {
 	b := mockBundle(config.Development)
 	b.Config.Presets.TriggerPauseStatus = config.Unpaused
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.ErrorContains(t, diags.Error(), "target with 'mode: development' cannot set trigger pause status to UNPAUSED by default")
 }
 
@@ -564,8 +551,7 @@ func TestPipelinesDevelopmentDisabled(t *testing.T) {
 	notEnabled := false
 	b.Config.Presets.PipelinesDevelopment = &notEnabled
 
-	m := bundle.Seq(ProcessTargetMode(), ApplyPresets())
-	diags := bundle.Apply(context.Background(), b, m)
+	diags := bundle.ApplySeq(context.Background(), b, ProcessTargetMode(), ApplyPresets())
 	require.NoError(t, diags.Error())
 
 	assert.False(t, b.Config.Resources.Pipelines["pipeline1"].CreatePipeline.Development)
diff --git a/bundle/config/mutator/resolve_resource_references_test.go b/bundle/config/mutator/resolve_resource_references_test.go
index 624e337c7..6bd974199 100644
--- a/bundle/config/mutator/resolve_resource_references_test.go
+++ b/bundle/config/mutator/resolve_resource_references_test.go
@@ -176,7 +176,7 @@ func TestResolveVariableReferencesInVariableLookups(t *testing.T) {
 		{ClusterId: "9876-5432-xywz", ClusterName: "some other cluster"},
 	}, nil)
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
 	require.NoError(t, diags.Error())
 	require.Equal(t, "cluster-bar-dev", b.Config.Variables["lookup"].Lookup.Cluster)
 	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
@@ -203,7 +203,7 @@ func TestResolveLookupVariableReferencesInVariableLookups(t *testing.T) {
 	m := mocks.NewMockWorkspaceClient(t)
 	b.SetWorkpaceClient(m.WorkspaceClient)
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(context.Background(), b, ResolveVariableReferencesInLookup(), ResolveResourceReferences())
 	require.ErrorContains(t, diags.Error(), "lookup variables cannot contain references to another lookup variables")
 }
 
@@ -229,7 +229,7 @@ func TestNoResolveLookupIfVariableSetWithEnvVariable(t *testing.T) {
 	ctx := context.Background()
 	ctx = env.Set(ctx, "BUNDLE_VAR_lookup", "1234-5678-abcd")
 
-	diags := bundle.Apply(ctx, b, bundle.Seq(SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences()))
+	diags := bundle.ApplySeq(ctx, b, SetVariables(), ResolveVariableReferencesInLookup(), ResolveResourceReferences())
 	require.NoError(t, diags.Error())
 	require.Equal(t, "1234-5678-abcd", b.Config.Variables["lookup"].Value)
 }
diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go
index 6cfe5718a..14d99346e 100644
--- a/bundle/config/mutator/translate_paths_test.go
+++ b/bundle/config/mutator/translate_paths_test.go
@@ -828,12 +828,10 @@ func TestTranslatePathWithComplexVariables(t *testing.T) {
 	})
 	require.NoError(t, diags.Error())
 
-	diags = bundle.Apply(ctx, b,
-		bundle.Seq(
-			mutator.SetVariables(),
-			mutator.ResolveVariableReferences("variables"),
-			mutator.TranslatePaths(),
-		))
+	diags = bundle.ApplySeq(ctx, b,
+		mutator.SetVariables(),
+		mutator.ResolveVariableReferences("variables"),
+		mutator.TranslatePaths())
 	require.NoError(t, diags.Error())
 
 	assert.Equal(
diff --git a/bundle/config/validate/interpolation_in_auth_config.go b/bundle/config/validate/interpolation_in_auth_config.go
new file mode 100644
index 000000000..83d6c6ae1
--- /dev/null
+++ b/bundle/config/validate/interpolation_in_auth_config.go
@@ -0,0 +1,77 @@
+package validate
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/libs/auth"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/dyn"
+	"github.com/databricks/cli/libs/dyn/dynvar"
+)
+
+type noInterpolationInAuthConfig struct{}
+
+func NoInterpolationInAuthConfig() bundle.Mutator {
+	return &noInterpolationInAuthConfig{}
+}
+
+func (f *noInterpolationInAuthConfig) Name() string {
+	return "validate:interpolation_in_auth_config"
+}
+
+func (f *noInterpolationInAuthConfig) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	authFields := []string{
+		// Generic attributes.
+		"host", "profile", "auth_type", "metadata_service_url",
+
+		// OAuth specific attributes.
+		"client_id",
+
+		// Google specific attributes.
+		"google_service_account",
+
+		// Azure specific attributes.
+		"azure_resource_id", "azure_use_msi", "azure_client_id", "azure_tenant_id",
+		"azure_environment", "azure_login_app_id",
+	}
+
+	diags := diag.Diagnostics{}
+
+	for _, fieldName := range authFields {
+		p := dyn.NewPath(dyn.Key("workspace"), dyn.Key(fieldName))
+		v, err := dyn.GetByPath(b.Config.Value(), p)
+		if dyn.IsNoSuchKeyError(err) {
+			continue
+		}
+		if err != nil {
+			return diag.FromErr(err)
+		}
+
+		if v.Kind() == dyn.KindInvalid || v.Kind() == dyn.KindNil {
+			continue
+		}
+
+		vv := v.MustString()
+
+		// Check if the field contains interpolation.
+		if dynvar.ContainsVariableReference(vv) {
+			envVar, ok := auth.GetEnvFor(fieldName)
+			if !ok {
+				continue
+			}
+
+			diags = append(diags, diag.Diagnostic{
+				Severity: diag.Warning,
+				Summary:  "Variable interpolation is not supported for fields that configure authentication",
+				Detail: fmt.Sprintf(`Interpolation is not supported for the field %s. Please set
+the %s environment variable if you wish to configure this field at runtime.`, p.String(), envVar),
+				Locations: v.Locations(),
+				Paths:     []dyn.Path{p},
+			})
+		}
+	}
+
+	return diags
+}
diff --git a/bundle/deferred.go b/bundle/deferred.go
deleted file mode 100644
index e7e0c2aeb..000000000
--- a/bundle/deferred.go
+++ /dev/null
@@ -1,30 +0,0 @@
-package bundle
-
-import (
-	"context"
-
-	"github.com/databricks/cli/libs/diag"
-)
-
-type DeferredMutator struct {
-	mutator Mutator
-	finally Mutator
-}
-
-func (d *DeferredMutator) Name() string {
-	return "deferred"
-}
-
-func Defer(mutator, finally Mutator) Mutator {
-	return &DeferredMutator{
-		mutator: mutator,
-		finally: finally,
-	}
-}
-
-func (d *DeferredMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
-	var diags diag.Diagnostics
-	diags = diags.Extend(Apply(ctx, b, d.mutator))
-	diags = diags.Extend(Apply(ctx, b, d.finally))
-	return diags
-}
diff --git a/bundle/deferred_test.go b/bundle/deferred_test.go
deleted file mode 100644
index ea3df17c4..000000000
--- a/bundle/deferred_test.go
+++ /dev/null
@@ -1,114 +0,0 @@
-package bundle
-
-import (
-	"context"
-	"testing"
-
-	"github.com/databricks/cli/libs/diag"
-	"github.com/stretchr/testify/assert"
-)
-
-type mutatorWithError struct {
-	applyCalled int
-	errorMsg    string
-}
-
-func (t *mutatorWithError) Name() string {
-	return "mutatorWithError"
-}
-
-func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics {
-	t.applyCalled++
-	return diag.Errorf(t.errorMsg) // nolint:govet
-}
-
-func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	m3 := &testMutator{}
-	cleanup := &testMutator{}
-	deferredMutator := Defer(Seq(m1, m2, m3), cleanup)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, deferredMutator)
-	assert.NoError(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, m3.applyCalled)
-	assert.Equal(t, 1, cleanup.applyCalled)
-}
-
-func TestDeferredMutatorWhenFirstFails(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
-	cleanup := &testMutator{}
-	deferredMutator := Defer(Seq(mErr, m1, m2), cleanup)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, deferredMutator)
-	assert.ErrorContains(t, diags.Error(), "mutator error occurred")
-
-	assert.Equal(t, 1, mErr.applyCalled)
-	assert.Equal(t, 0, m1.applyCalled)
-	assert.Equal(t, 0, m2.applyCalled)
-	assert.Equal(t, 1, cleanup.applyCalled)
-}
-
-func TestDeferredMutatorWhenMiddleOneFails(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
-	cleanup := &testMutator{}
-	deferredMutator := Defer(Seq(m1, mErr, m2), cleanup)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, deferredMutator)
-	assert.ErrorContains(t, diags.Error(), "mutator error occurred")
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, mErr.applyCalled)
-	assert.Equal(t, 0, m2.applyCalled)
-	assert.Equal(t, 1, cleanup.applyCalled)
-}
-
-func TestDeferredMutatorWhenLastOneFails(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
-	cleanup := &testMutator{}
-	deferredMutator := Defer(Seq(m1, m2, mErr), cleanup)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, deferredMutator)
-	assert.ErrorContains(t, diags.Error(), "mutator error occurred")
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, mErr.applyCalled)
-	assert.Equal(t, 1, cleanup.applyCalled)
-}
-
-func TestDeferredMutatorCombinesErrorMessages(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	mErr := &mutatorWithError{errorMsg: "mutator error occurred"}
-	cleanupErr := &mutatorWithError{errorMsg: "cleanup error occurred"}
-	deferredMutator := Defer(Seq(m1, m2, mErr), cleanupErr)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, deferredMutator)
-
-	var errs []string
-	for _, d := range diags {
-		errs = append(errs, d.Summary)
-	}
-	assert.Contains(t, errs, "mutator error occurred")
-	assert.Contains(t, errs, "cleanup error occurred")
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-	assert.Equal(t, 1, mErr.applyCalled)
-	assert.Equal(t, 1, cleanupErr.applyCalled)
-}
diff --git a/bundle/deploy/terraform/load_test.go b/bundle/deploy/terraform/load_test.go
index b7243ca19..e892535fe 100644
--- a/bundle/deploy/terraform/load_test.go
+++ b/bundle/deploy/terraform/load_test.go
@@ -32,10 +32,10 @@ func TestLoadWithNoState(t *testing.T) {
 	t.Setenv("DATABRICKS_TOKEN", "foobar")
 	b.WorkspaceClient()
 
-	diags := bundle.Apply(context.Background(), b, bundle.Seq(
+	diags := bundle.ApplySeq(context.Background(), b,
 		Initialize(),
 		Load(ErrorOnEmptyState),
-	))
+	)
 
 	require.ErrorContains(t, diags.Error(), "Did you forget to run 'databricks bundle deploy'")
 }
diff --git a/bundle/if.go b/bundle/if.go
deleted file mode 100644
index bad1d72d2..000000000
--- a/bundle/if.go
+++ /dev/null
@@ -1,40 +0,0 @@
-package bundle
-
-import (
-	"context"
-
-	"github.com/databricks/cli/libs/diag"
-)
-
-type ifMutator struct {
-	condition      func(context.Context, *Bundle) (bool, error)
-	onTrueMutator  Mutator
-	onFalseMutator Mutator
-}
-
-func If(
-	condition func(context.Context, *Bundle) (bool, error),
-	onTrueMutator Mutator,
-	onFalseMutator Mutator,
-) Mutator {
-	return &ifMutator{
-		condition, onTrueMutator, onFalseMutator,
-	}
-}
-
-func (m *ifMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics {
-	v, err := m.condition(ctx, b)
-	if err != nil {
-		return diag.FromErr(err)
-	}
-
-	if v {
-		return Apply(ctx, b, m.onTrueMutator)
-	} else {
-		return Apply(ctx, b, m.onFalseMutator)
-	}
-}
-
-func (m *ifMutator) Name() string {
-	return "If"
-}
diff --git a/bundle/if_test.go b/bundle/if_test.go
deleted file mode 100644
index b3fc0b9d9..000000000
--- a/bundle/if_test.go
+++ /dev/null
@@ -1,53 +0,0 @@
-package bundle
-
-import (
-	"context"
-	"testing"
-
-	"github.com/stretchr/testify/assert"
-)
-
-func TestIfMutatorTrue(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
-		return true, nil
-	}, m1, m2)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, ifMutator)
-	assert.NoError(t, diags.Error())
-
-	assert.Equal(t, 1, m1.applyCalled)
-	assert.Equal(t, 0, m2.applyCalled)
-}
-
-func TestIfMutatorFalse(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
-		return false, nil
-	}, m1, m2)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, ifMutator)
-	assert.NoError(t, diags.Error())
-
-	assert.Equal(t, 0, m1.applyCalled)
-	assert.Equal(t, 1, m2.applyCalled)
-}
-
-func TestIfMutatorError(t *testing.T) {
-	m1 := &testMutator{}
-	m2 := &testMutator{}
-	ifMutator := If(func(context.Context, *Bundle) (bool, error) {
-		return true, assert.AnError
-	}, m1, m2)
-
-	b := &Bundle{}
-	diags := Apply(context.Background(), b, ifMutator)
-	assert.Error(t, diags.Error())
-
-	assert.Equal(t, 0, m1.applyCalled)
-	assert.Equal(t, 0, m2.applyCalled)
-}
diff --git a/bundle/libraries/expand_glob_references.go b/bundle/libraries/expand_glob_references.go
index bb1905045..7a808f627 100644
--- a/bundle/libraries/expand_glob_references.go
+++ b/bundle/libraries/expand_glob_references.go
@@ -92,7 +92,7 @@ func expandLibraries(b *bundle.Bundle, p dyn.Path, v dyn.Value) (diag.Diagnostic
 
 	for _, match := range matches {
 		output = append(output, dyn.NewValue(map[string]dyn.Value{
-			libType: dyn.V(match),
+			libType: dyn.NewValue(match, lib.Locations()),
 		}, lib.Locations()))
 	}
 }
diff --git a/bundle/libraries/same_name_libraries.go b/bundle/libraries/same_name_libraries.go
new file mode 100644
index 000000000..ab869d3d2
--- /dev/null
+++ b/bundle/libraries/same_name_libraries.go
@@ -0,0 +1,104 @@
+package libraries
+
+import (
+	"context"
+	"path/filepath"
+	"strings"
+
+	"github.com/databricks/cli/bundle"
+	"github.com/databricks/cli/libs/diag"
+	"github.com/databricks/cli/libs/dyn"
+)
+
+type checkForSameNameLibraries struct{}
+
+var patterns = []dyn.Pattern{
+	taskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
+	forEachTaskLibrariesPattern.Append(dyn.AnyIndex(), dyn.AnyKey()),
+	envDepsPattern.Append(dyn.AnyIndex()),
+}
+
+type libData struct {
+	fullPath   string
+	locations  []dyn.Location
+	paths      []dyn.Path
+	otherPaths []string
+}
+
+func (c checkForSameNameLibraries) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
+	var diags diag.Diagnostics
+	libs := make(map[string]*libData)
+
+	err := b.Config.Mutate(func(rootConfig dyn.Value) (dyn.Value, error) {
+		var err error
+		for _, pattern := range patterns {
+			rootConfig, err = dyn.MapByPattern(rootConfig, pattern, func(p dyn.Path, libraryValue dyn.Value) (dyn.Value, error) {
+				libPath, ok := libraryValue.AsString()
+				if !ok {
+					return libraryValue, nil
+				}
+
+				// If not a local library, skip the check.
+				if !IsLibraryLocal(libPath) {
+					return libraryValue, nil
+				}
+
+				lib := filepath.Base(libPath)
+				// If the same basename was seen already but the full path is different,
+				// then it's a duplicate. Add the location to the location list.
+				lp, ok := libs[lib]
+				if !ok {
+					libs[lib] = &libData{
+						fullPath:   libPath,
+						locations:  []dyn.Location{libraryValue.Location()},
+						paths:      []dyn.Path{p},
+						otherPaths: []string{},
+					}
+				} else if lp.fullPath != libPath {
+					lp.locations = append(lp.locations, libraryValue.Location())
+					lp.paths = append(lp.paths, p)
+					lp.otherPaths = append(lp.otherPaths, libPath)
+				}
+
+				return libraryValue, nil
+			})
+			if err != nil {
+				return dyn.InvalidValue, err
+			}
+		}
+
+		if err != nil {
+			return dyn.InvalidValue, err
+		}
+
+		return rootConfig, nil
+	})
+
+	// Iterate over all the libraries and check if there are any duplicates.
+	// Duplicates will have more than one location.
+	// If there are duplicates, add a diagnostic.
+ for lib, lv := range libs { + if len(lv.locations) > 1 { + diags = append(diags, diag.Diagnostic{ + Severity: diag.Error, + Summary: "Duplicate local library names: " + lib, + Detail: "Local library names must be unique but found libraries with the same name: " + lv.fullPath + ", " + strings.Join(lv.otherPaths, ", "), + Locations: lv.locations, + Paths: lv.paths, + }) + } + } + if err != nil { + diags = diags.Extend(diag.FromErr(err)) + } + + return diags +} + +func (c checkForSameNameLibraries) Name() string { + return "CheckForSameNameLibraries" +} + +func CheckForSameNameLibraries() bundle.Mutator { + return checkForSameNameLibraries{} +} diff --git a/bundle/libraries/upload_test.go b/bundle/libraries/upload_test.go index 44b194c56..3ce67eeb6 100644 --- a/bundle/libraries/upload_test.go +++ b/bundle/libraries/upload_test.go @@ -93,7 +93,7 @@ func TestArtifactUploadForWorkspace(t *testing.T) { filer.CreateParentDirectories, ).Return(nil) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) + diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler)) require.NoError(t, diags.Error()) // Test that libraries path is updated @@ -181,7 +181,7 @@ func TestArtifactUploadForVolumes(t *testing.T) { filer.CreateParentDirectories, ).Return(nil) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) + diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler)) require.NoError(t, diags.Error()) // Test that libraries path is updated @@ -225,7 +225,7 @@ func TestArtifactUploadWithNoLibraryReference(t *testing.T) { filer.CreateParentDirectories, ).Return(nil) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) + diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler)) require.NoError(t, diags.Error()) require.Equal(t, "/Workspace/foo/bar/artifacts/.internal/source.whl", b.Config.Artifacts["whl"].Files[0].RemotePath) @@ -311,7 +311,7 @@ func TestUploadMultipleLibraries(t *testing.T) { filer.CreateParentDirectories, ).Return(nil).Once() - diags := bundle.Apply(context.Background(), b, bundle.Seq(ExpandGlobReferences(), UploadWithClient(mockFiler))) + diags := bundle.ApplySeq(context.Background(), b, ExpandGlobReferences(), UploadWithClient(mockFiler)) require.NoError(t, diags.Error()) // Test that libraries path is updated diff --git a/bundle/log_string.go b/bundle/log_string.go deleted file mode 100644 index f14e3a3ad..000000000 --- a/bundle/log_string.go +++ /dev/null @@ -1,28 +0,0 @@ -package bundle - -import ( - "context" - - "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/diag" -) - -type LogStringMutator struct { - message string -} - -func (d *LogStringMutator) Name() string { - return "log_string" -} - -func LogString(message string) Mutator { - return &LogStringMutator{ - message: message, - } -} - -func (m *LogStringMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { - cmdio.LogString(ctx, m.message) - - return nil -} diff --git a/bundle/mutator.go b/bundle/mutator.go index 16ef79ee7..af3940414 100644 --- a/bundle/mutator.go +++ b/bundle/mutator.go @@ -48,6 +48,17 @@ func Apply(ctx context.Context, b *Bundle, m Mutator) diag.Diagnostics { return diags } +func ApplySeq(ctx context.Context, b *Bundle, mutators ...Mutator) diag.Diagnostics { + 
diags := diag.Diagnostics{} + for _, m := range mutators { + diags = diags.Extend(Apply(ctx, b, m)) + if diags.HasError() { + return diags + } + } + return diags +} + type funcMutator struct { fn func(context.Context, *Bundle) diag.Diagnostics } diff --git a/bundle/mutator_test.go b/bundle/mutator_test.go index 04ff19cff..d6f21adc9 100644 --- a/bundle/mutator_test.go +++ b/bundle/mutator_test.go @@ -19,7 +19,7 @@ func (t *testMutator) Name() string { func (t *testMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { t.applyCalled++ - return Apply(ctx, b, Seq(t.nestedMutators...)) + return ApplySeq(ctx, b, t.nestedMutators...) } func TestMutator(t *testing.T) { diff --git a/bundle/permissions/validate_test.go b/bundle/permissions/validate_test.go index ff132b4e1..aa0b486d3 100644 --- a/bundle/permissions/validate_test.go +++ b/bundle/permissions/validate_test.go @@ -34,7 +34,7 @@ func TestValidateSharedRootPermissionsForShared(t *testing.T) { m := mocks.NewMockWorkspaceClient(t) b.SetWorkpaceClient(m.WorkspaceClient) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions())) + diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions()) require.Empty(t, diags) } @@ -59,7 +59,7 @@ func TestValidateSharedRootPermissionsForSharedError(t *testing.T) { m := mocks.NewMockWorkspaceClient(t) b.SetWorkpaceClient(m.WorkspaceClient) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions())) + diags := bundle.Apply(context.Background(), b, ValidateSharedRootPermissions()) require.Len(t, diags, 1) require.Equal(t, "the bundle root path /Workspace/Shared/foo/bar is writable by all workspace users", diags[0].Summary) require.Equal(t, diag.Warning, diags[0].Severity) diff --git a/bundle/permissions/workspace_root_test.go b/bundle/permissions/workspace_root_test.go index 3e5f9c61b..f9c5da7d6 100644 --- a/bundle/permissions/workspace_root_test.go +++ b/bundle/permissions/workspace_root_test.go @@ -73,7 +73,7 @@ func TestApplyWorkspaceRootPermissions(t *testing.T) { WorkspaceObjectType: "directories", }).Return(nil, nil) - diags := bundle.Apply(context.Background(), b, bundle.Seq(ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions())) + diags := bundle.ApplySeq(context.Background(), b, ValidateSharedRootPermissions(), ApplyWorkspaceRootPermissions()) require.Empty(t, diags) } diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index c62c48aea..ae54e8657 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -1,45 +1,57 @@ package phases import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" ) -func Bind(opts *terraform.BindOptions) bundle.Mutator { - return newPhase( - "bind", - []bundle.Mutator{ - lock.Acquire(), - bundle.Defer( - bundle.Seq( - terraform.StatePull(), - terraform.Interpolate(), - terraform.Write(), - terraform.Import(opts), - terraform.StatePush(), - ), - lock.Release(lock.GoalBind), - ), - }, - ) +func Bind(ctx context.Context, b *bundle.Bundle, opts *terraform.BindOptions) (diags diag.Diagnostics) { + log.Info(ctx, "Phase: bind") + + diags = bundle.Apply(ctx, b, lock.Acquire()) + if diags.HasError() { + return diags + } + + defer func() { + diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalBind))) + }() + + diags = diags.Extend(bundle.ApplySeq(ctx, b, + 
terraform.StatePull(), + terraform.Interpolate(), + terraform.Write(), + terraform.Import(opts), + terraform.StatePush(), + )) + + return diags } -func Unbind(resourceType, resourceKey string) bundle.Mutator { - return newPhase( - "unbind", - []bundle.Mutator{ - lock.Acquire(), - bundle.Defer( - bundle.Seq( - terraform.StatePull(), - terraform.Interpolate(), - terraform.Write(), - terraform.Unbind(resourceType, resourceKey), - terraform.StatePush(), - ), - lock.Release(lock.GoalUnbind), - ), - }, - ) +func Unbind(ctx context.Context, b *bundle.Bundle, resourceType, resourceKey string) (diags diag.Diagnostics) { + log.Info(ctx, "Phase: unbind") + + diags = bundle.Apply(ctx, b, lock.Acquire()) + if diags.HasError() { + return diags + } + + defer func() { + diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalUnbind))) + }() + + diags = diags.Extend(bundle.ApplySeq(ctx, b, + terraform.StatePull(), + terraform.Interpolate(), + terraform.Write(), + terraform.Unbind(resourceType, resourceKey), + terraform.StatePush(), + )) + + return diags } diff --git a/bundle/phases/build.go b/bundle/phases/build.go index cc35983ec..0170ed51c 100644 --- a/bundle/phases/build.go +++ b/bundle/phases/build.go @@ -1,28 +1,31 @@ package phases import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" "github.com/databricks/cli/bundle/artifacts/whl" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/scripts" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" ) // The build phase builds artifacts. -func Build() bundle.Mutator { - return newPhase( - "build", - []bundle.Mutator{ - scripts.Execute(config.ScriptPreBuild), - whl.DetectPackage(), - artifacts.InferMissingProperties(), - artifacts.PrepareAll(), - artifacts.BuildAll(), - scripts.Execute(config.ScriptPostBuild), - mutator.ResolveVariableReferences( - "artifacts", - ), - }, +func Build(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + log.Info(ctx, "Phase: build") + + return bundle.ApplySeq(ctx, b, + scripts.Execute(config.ScriptPreBuild), + whl.DetectPackage(), + artifacts.InferMissingProperties(), + artifacts.PrepareAll(), + artifacts.BuildAll(), + scripts.Execute(config.ScriptPostBuild), + mutator.ResolveVariableReferences( + "artifacts", + ), ) } diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index c6ec04962..02e0e9bd8 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -19,6 +19,8 @@ import ( "github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/trampoline" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/sync" terraformlib "github.com/databricks/cli/libs/terraform" tfjson "github.com/hashicorp/terraform-json" @@ -124,60 +126,94 @@ is removed from the catalog, but the underlying files are not deleted:` return approved, nil } -// The deploy phase deploys artifacts and resources. -func Deploy(outputHandler sync.OutputHandler) bundle.Mutator { +func deployCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Core mutators that CRUD resources and modify deployment state. These // mutators need informed consent if they are potentially destructive. 
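 	// Approval is requested by the caller (Deploy) via approvalForDeploy before
 	// deployCore runs, so the mutators below can assume informed consent.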
- deployCore := bundle.Defer( - bundle.Seq( - bundle.LogString("Deploying resources..."), - terraform.Apply(), - ), - bundle.Seq( - terraform.StatePush(), - terraform.Load(), - apps.InterpolateVariables(), - apps.UploadConfig(), - metadata.Compute(), - metadata.Upload(), - bundle.LogString("Deployment complete!"), - ), - ) + cmdio.LogString(ctx, "Deploying resources...") + diags := bundle.Apply(ctx, b, terraform.Apply()) - deployMutator := bundle.Seq( + // following original logic, continuing with sequence below even if terraform had errors + + diags = diags.Extend(bundle.ApplySeq(ctx, b, + terraform.StatePush(), + terraform.Load(), + apps.InterpolateVariables(), + apps.UploadConfig(), + metadata.Compute(), + metadata.Upload(), + )) + + if !diags.HasError() { + cmdio.LogString(ctx, "Deployment complete!") + } + + return diags +} + +// The deploy phase deploys artifacts and resources. +func Deploy(ctx context.Context, b *bundle.Bundle, outputHandler sync.OutputHandler) (diags diag.Diagnostics) { + log.Info(ctx, "Phase: deploy") + + // Core mutators that CRUD resources and modify deployment state. These + // mutators need informed consent if they are potentially destructive. + diags = bundle.ApplySeq(ctx, b, scripts.Execute(config.ScriptPreDeploy), lock.Acquire(), - bundle.Defer( - bundle.Seq( - terraform.StatePull(), - terraform.CheckDashboardsModifiedRemotely(), - deploy.StatePull(), - mutator.ValidateGitDetails(), - artifacts.CleanUp(), - libraries.ExpandGlobReferences(), - libraries.Upload(), - trampoline.TransformWheelTask(), - files.Upload(outputHandler), - deploy.StateUpdate(), - deploy.StatePush(), - permissions.ApplyWorkspaceRootPermissions(), - terraform.Interpolate(), - terraform.Write(), - terraform.CheckRunningResource(), - terraform.Plan(terraform.PlanGoal("deploy")), - bundle.If( - approvalForDeploy, - deployCore, - bundle.LogString("Deployment cancelled!"), - ), - ), - lock.Release(lock.GoalDeploy), - ), - scripts.Execute(config.ScriptPostDeploy), ) - return newPhase( - "deploy", - []bundle.Mutator{deployMutator}, + if diags.HasError() { + // lock is not acquired here + return diags + } + + // lock is acquired here + defer func() { + diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDeploy))) + }() + + diags = bundle.ApplySeq(ctx, b, + terraform.StatePull(), + terraform.CheckDashboardsModifiedRemotely(), + deploy.StatePull(), + mutator.ValidateGitDetails(), + artifacts.CleanUp(), + // libraries.CheckForSameNameLibraries() needs to be run after we expand glob references so we + // know what are the actual library paths. + // libraries.ExpandGlobReferences() has to be run after the libraries are built and thus this + // mutator is part of the deploy step rather than validate. 
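+		// For example, a reference like ./whl1/dist/*.whl only resolves to a
+		// concrete wheel file once the artifact has been built, which is why the
+		// duplicate-name check cannot run any earlier than this point.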
+ libraries.ExpandGlobReferences(), + libraries.CheckForSameNameLibraries(), + libraries.Upload(), + trampoline.TransformWheelTask(), + files.Upload(outputHandler), + deploy.StateUpdate(), + deploy.StatePush(), + permissions.ApplyWorkspaceRootPermissions(), + terraform.Interpolate(), + terraform.Write(), + terraform.CheckRunningResource(), + terraform.Plan(terraform.PlanGoal("deploy")), ) + + if diags.HasError() { + return diags + } + + haveApproval, err := approvalForDeploy(ctx, b) + if err != nil { + diags = diags.Extend(diag.FromErr(err)) + return diags + } + + if haveApproval { + diags = diags.Extend(deployCore(ctx, b)) + } else { + cmdio.LogString(ctx, "Deployment cancelled!") + } + + if diags.HasError() { + return diags + } + + return diags.Extend(bundle.Apply(ctx, b, scripts.Execute(config.ScriptPostDeploy))) } diff --git a/bundle/phases/destroy.go b/bundle/phases/destroy.go index 05a41dea2..daff07965 100644 --- a/bundle/phases/destroy.go +++ b/bundle/phases/destroy.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/log" terraformlib "github.com/databricks/cli/libs/terraform" @@ -77,42 +78,65 @@ func approvalForDestroy(ctx context.Context, b *bundle.Bundle) (bool, error) { return approved, nil } -// The destroy phase deletes artifacts and resources. -func Destroy() bundle.Mutator { +func destroyCore(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // Core destructive mutators for destroy. These require informed user consent. - destroyCore := bundle.Seq( + diags := bundle.ApplySeq(ctx, b, terraform.Apply(), files.Delete(), - bundle.LogString("Destroy complete!"), ) - destroyMutator := bundle.Seq( - lock.Acquire(), - bundle.Defer( - bundle.Seq( - terraform.StatePull(), - terraform.Interpolate(), - terraform.Write(), - terraform.Plan(terraform.PlanGoal("destroy")), - bundle.If( - approvalForDestroy, - destroyCore, - bundle.LogString("Destroy cancelled!"), - ), - ), - lock.Release(lock.GoalDestroy), - ), - ) + if !diags.HasError() { + cmdio.LogString(ctx, "Destroy complete!") + } - return newPhase( - "destroy", - []bundle.Mutator{ - // Only run deploy mutator if root path exists. - bundle.If( - assertRootPathExists, - destroyMutator, - bundle.LogString("No active deployment found to destroy!"), - ), - }, - ) + return diags +} + +// The destroy phase deletes artifacts and resources. 
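+// Like Deploy, it acquires the deployment lock, pulls Terraform state, plans
+// the destroy, and only applies the plan after explicit user approval.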
+func Destroy(ctx context.Context, b *bundle.Bundle) (diags diag.Diagnostics) { + log.Info(ctx, "Phase: destroy") + + ok, err := assertRootPathExists(ctx, b) + if err != nil { + return diag.FromErr(err) + } + + if !ok { + cmdio.LogString(ctx, "No active deployment found to destroy!") + return diags + } + + diags = diags.Extend(bundle.Apply(ctx, b, lock.Acquire())) + if diags.HasError() { + return diags + } + + defer func() { + diags = diags.Extend(bundle.Apply(ctx, b, lock.Release(lock.GoalDestroy))) + }() + + diags = diags.Extend(bundle.ApplySeq(ctx, b, + terraform.StatePull(), + terraform.Interpolate(), + terraform.Write(), + terraform.Plan(terraform.PlanGoal("destroy")), + )) + + if diags.HasError() { + return diags + } + + hasApproval, err := approvalForDestroy(ctx, b) + if err != nil { + diags = diags.Extend(diag.FromErr(err)) + return diags + } + + if hasApproval { + diags = diags.Extend(destroyCore(ctx, b)) + } else { + cmdio.LogString(ctx, "Destroy cancelled!") + } + + return diags } diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index afd6def3f..1da5b61f4 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -1,6 +1,8 @@ package phases import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/apps" "github.com/databricks/cli/bundle/config" @@ -12,95 +14,97 @@ import ( "github.com/databricks/cli/bundle/permissions" "github.com/databricks/cli/bundle/scripts" "github.com/databricks/cli/bundle/trampoline" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" ) // The initialize phase fills in defaults and connects to the workspace. // Interpolation of fields referring to the "bundle" and "workspace" keys // happens upon completion of this phase. -func Initialize() bundle.Mutator { - return newPhase( - "initialize", - []bundle.Mutator{ - validate.AllResourcesHaveValues(), +func Initialize(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + log.Info(ctx, "Phase: initialize") - // Update all path fields in the sync block to be relative to the bundle root path. - mutator.RewriteSyncPaths(), + return bundle.ApplySeq(ctx, b, + validate.AllResourcesHaveValues(), + validate.NoInterpolationInAuthConfig(), - // Configure the default sync path to equal the bundle root if not explicitly configured. - // By default, this means all files in the bundle root directory are synchronized. - mutator.SyncDefaultPath(), + // Update all path fields in the sync block to be relative to the bundle root path. + mutator.RewriteSyncPaths(), - // Figure out if the sync root path is identical or an ancestor of the bundle root path. - // If it is an ancestor, this updates all paths to be relative to the sync root path. - mutator.SyncInferRoot(), + // Configure the default sync path to equal the bundle root if not explicitly configured. + // By default, this means all files in the bundle root directory are synchronized. + mutator.SyncDefaultPath(), - mutator.PopulateCurrentUser(), - mutator.LoadGitDetails(), + // Figure out if the sync root path is identical or an ancestor of the bundle root path. + // If it is an ancestor, this updates all paths to be relative to the sync root path. + mutator.SyncInferRoot(), - // This mutator needs to be run before variable interpolation and defining default workspace paths - // because it affects how workspace variables are resolved. 
- mutator.ApplySourceLinkedDeploymentPreset(), + mutator.PopulateCurrentUser(), + mutator.LoadGitDetails(), - mutator.DefineDefaultWorkspaceRoot(), - mutator.ExpandWorkspaceRoot(), - mutator.DefineDefaultWorkspacePaths(), - mutator.PrependWorkspacePrefix(), + // This mutator needs to be run before variable interpolation and defining default workspace paths + // because it affects how workspace variables are resolved. + mutator.ApplySourceLinkedDeploymentPreset(), - // This mutator needs to be run before variable interpolation because it - // searches for strings with variable references in them. - mutator.RewriteWorkspacePrefix(), + mutator.DefineDefaultWorkspaceRoot(), + mutator.ExpandWorkspaceRoot(), + mutator.DefineDefaultWorkspacePaths(), + mutator.PrependWorkspacePrefix(), - mutator.SetVariables(), + // This mutator needs to be run before variable interpolation because it + // searches for strings with variable references in them. + mutator.RewriteWorkspacePrefix(), - // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences, - // ResolveVariableReferencesInComplexVariables and ResolveVariableReferences. - // See what is expected in PythonMutatorPhaseInit doc - pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit), - pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseLoadResources), - pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseApplyMutators), - mutator.ResolveVariableReferencesInLookup(), - mutator.ResolveResourceReferences(), - mutator.ResolveVariableReferences( - "bundle", - "workspace", - "variables", - ), + mutator.SetVariables(), - mutator.MergeJobClusters(), - mutator.MergeJobParameters(), - mutator.MergeJobTasks(), - mutator.MergePipelineClusters(), - mutator.MergeApps(), + // Intentionally placed before ResolveVariableReferencesInLookup, ResolveResourceReferences, + // ResolveVariableReferencesInComplexVariables and ResolveVariableReferences. + // See what is expected in PythonMutatorPhaseInit doc + pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseInit), + pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseLoadResources), + pythonmutator.PythonMutator(pythonmutator.PythonMutatorPhaseApplyMutators), + mutator.ResolveVariableReferencesInLookup(), + mutator.ResolveResourceReferences(), + mutator.ResolveVariableReferences( + "bundle", + "workspace", + "variables", + ), - mutator.CaptureSchemaDependency(), + mutator.MergeJobClusters(), + mutator.MergeJobParameters(), + mutator.MergeJobTasks(), + mutator.MergePipelineClusters(), + mutator.MergeApps(), - // Provide permission config errors & warnings after initializing all variables - permissions.PermissionDiagnostics(), - mutator.SetRunAs(), - mutator.OverrideCompute(), - mutator.ConfigureDashboardDefaults(), - mutator.ConfigureVolumeDefaults(), - mutator.ProcessTargetMode(), - mutator.ApplyPresets(), - mutator.DefaultQueueing(), - mutator.ExpandPipelineGlobPaths(), + mutator.CaptureSchemaDependency(), - // Configure use of WSFS for reads if the CLI is running on Databricks. 
- mutator.ConfigureWSFS(), + // Provide permission config errors & warnings after initializing all variables + permissions.PermissionDiagnostics(), + mutator.SetRunAs(), + mutator.OverrideCompute(), + mutator.ConfigureDashboardDefaults(), + mutator.ConfigureVolumeDefaults(), + mutator.ProcessTargetMode(), + mutator.ApplyPresets(), + mutator.DefaultQueueing(), + mutator.ExpandPipelineGlobPaths(), - mutator.TranslatePaths(), - trampoline.WrapperWarning(), + // Configure use of WSFS for reads if the CLI is running on Databricks. + mutator.ConfigureWSFS(), - apps.Validate(), + mutator.TranslatePaths(), + trampoline.WrapperWarning(), - permissions.ValidateSharedRootPermissions(), - permissions.ApplyBundlePermissions(), - permissions.FilterCurrentUser(), + apps.Validate(), - metadata.AnnotateJobs(), - metadata.AnnotatePipelines(), - terraform.Initialize(), - scripts.Execute(config.ScriptPostInit), - }, + permissions.ValidateSharedRootPermissions(), + permissions.ApplyBundlePermissions(), + permissions.FilterCurrentUser(), + + metadata.AnnotateJobs(), + metadata.AnnotatePipelines(), + terraform.Initialize(), + scripts.Execute(config.ScriptPostInit), ) } diff --git a/bundle/phases/load.go b/bundle/phases/load.go index fa0668775..844bc0776 100644 --- a/bundle/phases/load.go +++ b/bundle/phases/load.go @@ -1,29 +1,40 @@ package phases import ( + "context" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/log" ) // The load phase loads configuration from disk and performs // lightweight preprocessing (anything that can be done without network I/O). -func Load() bundle.Mutator { - return newPhase( - "load", - mutator.DefaultMutators(), - ) +func Load(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + log.Info(ctx, "Phase: load") + + return mutator.DefaultMutators(ctx, b) } -func LoadDefaultTarget() bundle.Mutator { - return newPhase( - "load", - append(mutator.DefaultMutators(), mutator.SelectDefaultTarget()), - ) +func LoadDefaultTarget(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + log.Info(ctx, "Phase: load") + + diags := mutator.DefaultMutators(ctx, b) + if diags.HasError() { + return diags + } + + return diags.Extend(bundle.Apply(ctx, b, mutator.SelectDefaultTarget())) } -func LoadNamedTarget(target string) bundle.Mutator { - return newPhase( - "load", - append(mutator.DefaultMutators(), mutator.SelectTarget(target)), - ) +func LoadNamedTarget(ctx context.Context, b *bundle.Bundle, target string) diag.Diagnostics { + log.Info(ctx, "Phase: load") + + diags := mutator.DefaultMutators(ctx, b) + if diags.HasError() { + return diags + } + + return diags.Extend(bundle.Apply(ctx, b, mutator.SelectTarget(target))) } diff --git a/bundle/phases/phase.go b/bundle/phases/phase.go deleted file mode 100644 index 1bb4f86a2..000000000 --- a/bundle/phases/phase.go +++ /dev/null @@ -1,33 +0,0 @@ -// Package phases defines build phases as logical groups of [bundle.Mutator] instances. -package phases - -import ( - "context" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/libs/diag" - "github.com/databricks/cli/libs/log" -) - -// This phase type groups mutators that belong to a lifecycle phase. -// It expands into the specific mutators when applied. 
-type phase struct { - name string - mutators []bundle.Mutator -} - -func newPhase(name string, mutators []bundle.Mutator) bundle.Mutator { - return &phase{ - name: name, - mutators: mutators, - } -} - -func (p *phase) Name() string { - return p.name -} - -func (p *phase) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - log.Infof(ctx, "Phase: %s", p.Name()) - return bundle.Apply(ctx, b, bundle.Seq(p.mutators...)) -} diff --git a/bundle/run/app_test.go b/bundle/run/app_test.go index 77f197e8d..e988988f4 100644 --- a/bundle/run/app_test.go +++ b/bundle/run/app_test.go @@ -76,10 +76,10 @@ func setupBundle(t *testing.T) (context.Context, *bundle.Bundle, *mocks.MockWork ctx := cmdio.MockDiscard(context.Background()) - diags := bundle.Apply(ctx, b, bundle.Seq( + diags := bundle.ApplySeq(ctx, b, mutator.DefineDefaultWorkspacePaths(), mutator.TranslatePaths(), - )) + ) require.Empty(t, diags) return ctx, b, mwc diff --git a/bundle/seq.go b/bundle/seq.go deleted file mode 100644 index c1260a3f0..000000000 --- a/bundle/seq.go +++ /dev/null @@ -1,30 +0,0 @@ -package bundle - -import ( - "context" - - "github.com/databricks/cli/libs/diag" -) - -type seqMutator struct { - mutators []Mutator -} - -func (s *seqMutator) Name() string { - return "seq" -} - -func (s *seqMutator) Apply(ctx context.Context, b *Bundle) diag.Diagnostics { - var diags diag.Diagnostics - for _, m := range s.mutators { - diags = diags.Extend(Apply(ctx, b, m)) - if diags.HasError() { - break - } - } - return diags -} - -func Seq(ms ...Mutator) Mutator { - return &seqMutator{mutators: ms} -} diff --git a/bundle/seq_test.go b/bundle/seq_test.go deleted file mode 100644 index 74f975ed8..000000000 --- a/bundle/seq_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package bundle - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestSeqMutator(t *testing.T) { - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, m2, m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 1, m3.applyCalled) -} - -func TestSeqWithDeferredMutator(t *testing.T) { - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - m4 := &testMutator{} - seqMutator := Seq(m1, Defer(m2, m3), m4) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.NoError(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 1, m3.applyCalled) - assert.Equal(t, 1, m4.applyCalled) -} - -func TestSeqWithErrorAndDeferredMutator(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: "error msg"} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(errorMut, Defer(m1, m2), m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.Error(t, diags.Error()) - - assert.Equal(t, 1, errorMut.applyCalled) - assert.Equal(t, 0, m1.applyCalled) - assert.Equal(t, 0, m2.applyCalled) - assert.Equal(t, 0, m3.applyCalled) -} - -func TestSeqWithErrorInsideDeferredMutator(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: "error msg"} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, Defer(errorMut, m2), m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.Error(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 
1, errorMut.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 0, m3.applyCalled) -} - -func TestSeqWithErrorInsideFinallyStage(t *testing.T) { - errorMut := &mutatorWithError{errorMsg: "error msg"} - m1 := &testMutator{} - m2 := &testMutator{} - m3 := &testMutator{} - seqMutator := Seq(m1, Defer(m2, errorMut), m3) - - b := &Bundle{} - diags := Apply(context.Background(), b, seqMutator) - assert.Error(t, diags.Error()) - - assert.Equal(t, 1, m1.applyCalled) - assert.Equal(t, 1, m2.applyCalled) - assert.Equal(t, 1, errorMut.applyCalled) - assert.Equal(t, 0, m3.applyCalled) -} diff --git a/bundle/tests/apps_test.go b/bundle/tests/apps_test.go index 7fee60d14..c3a0da2ca 100644 --- a/bundle/tests/apps_test.go +++ b/bundle/tests/apps_test.go @@ -13,11 +13,10 @@ func TestApps(t *testing.T) { b := load(t, "./apps") assert.Equal(t, "apps", b.Config.Bundle.Name) - diags := bundle.Apply(context.Background(), b, - bundle.Seq( - mutator.SetVariables(), - mutator.ResolveVariableReferences("variables"), - )) + diags := bundle.ApplySeq(context.Background(), b, + mutator.SetVariables(), + mutator.ResolveVariableReferences("variables"), + ) assert.Empty(t, diags) app := b.Config.Resources.Apps["my_app"] @@ -37,11 +36,10 @@ func TestAppsOverride(t *testing.T) { b := loadTarget(t, "./apps", "development") assert.Equal(t, "apps", b.Config.Bundle.Name) - diags := bundle.Apply(context.Background(), b, - bundle.Seq( - mutator.SetVariables(), - mutator.ResolveVariableReferences("variables"), - )) + diags := bundle.ApplySeq(context.Background(), b, + mutator.SetVariables(), + mutator.ResolveVariableReferences("variables"), + ) assert.Empty(t, diags) app := b.Config.Resources.Apps["my_app"] assert.Equal(t, "my-app", app.Name) diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index 15f8fcec1..07ec4a775 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -17,7 +17,7 @@ func TestIncludeInvalid(t *testing.T) { ctx := context.Background() b, err := bundle.Load(ctx, "./include_invalid") require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.Load()) + diags := phases.Load(ctx, b) require.Error(t, diags.Error()) assert.ErrorContains(t, diags.Error(), "notexists.yml defined in 'include' section does not match any files") } diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 9b246b7cc..6748e6409 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -20,7 +20,7 @@ func load(t *testing.T, path string) *bundle.Bundle { ctx := context.Background() b, err := bundle.Load(ctx, path) require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.Load()) + diags := phases.Load(ctx, b) require.NoError(t, diags.Error()) return b } @@ -38,8 +38,9 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) { return nil, diag.FromErr(err) } - diags := bundle.Apply(ctx, b, bundle.Seq( - phases.LoadNamedTarget(env), + diags := phases.LoadNamedTarget(ctx, b, env) + + diags = diags.Extend(bundle.ApplySeq(ctx, b, mutator.RewriteSyncPaths(), mutator.SyncDefaultPath(), mutator.SyncInferRoot(), @@ -69,10 +70,8 @@ func initializeTarget(t *testing.T, path, env string) (*bundle.Bundle, diag.Diag configureMock(t, b) ctx := dbr.MockRuntime(context.Background(), false) - diags := bundle.Apply(ctx, b, bundle.Seq( - mutator.SelectTarget(env), - phases.Initialize(), - )) + diags := bundle.Apply(ctx, b, mutator.SelectTarget(env)) + diags = diags.Extend(phases.Initialize(ctx, b)) return b, diags } diff --git 
a/bundle/tests/python_wheel_test.go b/bundle/tests/python_wheel_test.go index 06cb05270..22702ec44 100644 --- a/bundle/tests/python_wheel_test.go +++ b/bundle/tests/python_wheel_test.go @@ -18,7 +18,7 @@ func TestPythonWheelBuild(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -34,7 +34,7 @@ func TestPythonWheelBuildAutoDetect(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_no_artifact", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact/dist/my_test_code-*.whl") @@ -50,7 +50,7 @@ func TestPythonWheelBuildAutoDetectWithNotebookTask(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_notebook", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel_no_artifact_notebook/dist/my_test_code-*.whl") @@ -66,7 +66,7 @@ func TestPythonWheelWithDBFSLib(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_dbfs_lib", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) match := libraries.ExpandGlobReferences() @@ -78,7 +78,7 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_no_artifact_no_setup", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) mockFiler := mockfiler.NewMockFiler(t) @@ -90,10 +90,10 @@ func TestPythonWheelBuildNoBuildJustUpload(t *testing.T) { filer.CreateParentDirectories, ).Return(nil) - diags = bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.UploadWithClient(mockFiler), - )) + ) require.NoError(t, diags.Error()) require.Empty(t, diags) require.Equal(t, "/Workspace/foo/bar/.internal/my_test_code-0.0.1-py3-none-any.whl", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[0].Libraries[0].Whl) @@ -103,7 +103,7 @@ func TestPythonWheelBuildWithEnvironmentKey(t *testing.T) { b := loadTarget(t, "./python_wheel/environment_key", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/environment_key/my_test_code/dist/my_test_code-*.whl") @@ -119,7 +119,7 @@ func TestPythonWheelBuildMultiple(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_multiple", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, diags.Error()) matches, err := filepath.Glob("./python_wheel/python_wheel_multiple/my_test_code/dist/my_test_code*.whl") @@ -135,7 +135,7 @@ func TestPythonWheelNoBuild(t *testing.T) { b := loadTarget(t, "./python_wheel/python_wheel_no_build", "default") ctx := context.Background() - diags := bundle.Apply(ctx, b, phases.Build()) + diags := phases.Build(ctx, b) require.NoError(t, 
diags.Error()) match := libraries.ExpandGlobReferences() diff --git a/bundle/tests/validate_test.go b/bundle/tests/validate_test.go index 9cd7c201b..a71b604b0 100644 --- a/bundle/tests/validate_test.go +++ b/bundle/tests/validate_test.go @@ -132,7 +132,7 @@ func TestValidateUniqueResourceIdentifiers(t *testing.T) { require.NoError(t, err) // The UniqueResourceKeys mutator is run as part of the Load phase. - diags := bundle.Apply(ctx, b, phases.Load()) + diags := phases.Load(ctx, b) assert.Equal(t, tc.diagnostics, diags) }) } diff --git a/bundle/trampoline/python_wheel.go b/bundle/trampoline/python_wheel.go index 075804479..0951b340c 100644 --- a/bundle/trampoline/python_wheel.go +++ b/bundle/trampoline/python_wheel.go @@ -8,8 +8,8 @@ import ( "strings" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -61,22 +61,30 @@ s = f.getvalue() dbutils.notebook.exit(s) ` +type transformWheelTask struct{} + +func (transformWheelTask) Name() string { + return "TransformWheelTask" +} + // This mutator takes the wheel task and transforms it into notebook // which installs uploaded wheels using %pip and then calling corresponding // entry point. func TransformWheelTask() bundle.Mutator { - return bundle.If( - func(_ context.Context, b *bundle.Bundle) (bool, error) { - res := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper - return res, nil - }, - NewTrampoline( - "python_wheel", - &pythonTrampoline{}, - NOTEBOOK_TEMPLATE, - ), - mutator.NoOp(), - ) + return transformWheelTask{} +} + +func (transformWheelTask) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { + isEnabled := b.Config.Experimental != nil && b.Config.Experimental.PythonWheelWrapper + if !isEnabled { + return nil + } + + return bundle.Apply(ctx, b, NewTrampoline( + "python_wheel", + &pythonTrampoline{}, + NOTEBOOK_TEMPLATE, + )) } type pythonTrampoline struct{} diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 560b07e39..407a14d8d 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -69,14 +69,19 @@ func newDeployCommand() *cobra.Command { } } - diags = diags.Extend( - bundle.Apply(ctx, b, bundle.Seq( - phases.Initialize(), - validate.FastValidate(), - phases.Build(), - phases.Deploy(outputHandler), - )), - ) + diags = diags.Extend(phases.Initialize(ctx, b)) + + if !diags.HasError() { + diags = diags.Extend(bundle.Apply(ctx, b, validate.FastValidate())) + } + + if !diags.HasError() { + diags = diags.Extend(phases.Build(ctx, b)) + } + + if !diags.HasError() { + diags = diags.Extend(phases.Deploy(ctx, b, outputHandler)) + } } renderOpts := render.RenderOptions{RenderSummaryTable: false} diff --git a/cmd/bundle/deployment/bind.go b/cmd/bundle/deployment/bind.go index 71f441d3d..b11984c51 100644 --- a/cmd/bundle/deployment/bind.go +++ b/cmd/bundle/deployment/bind.go @@ -53,15 +53,15 @@ func newBindCommand() *cobra.Command { return nil }) - diags = bundle.Apply(ctx, b, bundle.Seq( - phases.Initialize(), - phases.Bind(&terraform.BindOptions{ + diags = phases.Initialize(ctx, b) + if !diags.HasError() { + diags = diags.Extend(phases.Bind(ctx, b, &terraform.BindOptions{ AutoApprove: autoApprove, ResourceType: resource.TerraformResourceName(), ResourceKey: args[0], ResourceId: args[1], - }), - )) + })) + } if err := diags.Error(); err != nil { return 
fmt.Errorf("failed to bind the resource, err: %w", err) } diff --git a/cmd/bundle/deployment/unbind.go b/cmd/bundle/deployment/unbind.go index 9de5285a5..3fe5fbce6 100644 --- a/cmd/bundle/deployment/unbind.go +++ b/cmd/bundle/deployment/unbind.go @@ -38,10 +38,10 @@ func newUnbindCommand() *cobra.Command { return nil }) - diags = bundle.Apply(cmd.Context(), b, bundle.Seq( - phases.Initialize(), - phases.Unbind(resource.TerraformResourceName(), args[0]), - )) + diags = phases.Initialize(ctx, b) + if !diags.HasError() { + diags = diags.Extend(phases.Unbind(ctx, b, resource.TerraformResourceName(), args[0])) + } if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index 0b2f14875..82580f994 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -61,20 +61,25 @@ func newDestroyCommand() *cobra.Command { return errors.New("please specify --auto-approve since selected logging format is json") } - diags = bundle.Apply(ctx, b, bundle.Seq( - phases.Initialize(), - // We need to resolve artifact variable (how we do it in build phase) - // because some of the to-be-destroyed resource might use this variable. - // Not resolving might lead to terraform "Reference to undeclared resource" error - mutator.ResolveVariableReferences( - "artifacts", - ), - phases.Destroy(), - )) + diags = phases.Initialize(ctx, b) if err := diags.Error(); err != nil { return err } - return nil + + diags = diags.Extend( + // We need to resolve artifact variable (how we do it in build phase) + // because some of the to-be-destroyed resource might use this variable. + // Not resolving might lead to terraform "Reference to undeclared resource" error + bundle.Apply(ctx, b, mutator.ResolveVariableReferences("artifacts")), + ) + + if err := diags.Error(); err != nil { + return err + } + + diags = diags.Extend(phases.Destroy(ctx, b)) + // QQQ we're not reporting warnings there. This would be addressed by switching to streaming warnings/errors instead of accumulating. 
+ return diags.Error() } return cmd diff --git a/cmd/bundle/generate/dashboard.go b/cmd/bundle/generate/dashboard.go index d56d246c2..92cd2f164 100644 --- a/cmd/bundle/generate/dashboard.go +++ b/cmd/bundle/generate/dashboard.go @@ -345,8 +345,12 @@ func (d *dashboard) initialize(b *bundle.Bundle) diag.Diagnostics { } func (d *dashboard) runForResource(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - diags := bundle.Apply(ctx, b, bundle.Seq( - phases.Initialize(), + diags := phases.Initialize(ctx, b) + if diags.HasError() { + return diags + } + + diags = diags.Extend(bundle.ApplySeq(ctx, b, terraform.Interpolate(), terraform.Write(), terraform.StatePull(), diff --git a/cmd/bundle/open.go b/cmd/bundle/open.go index 5a26e1ea7..733758a8e 100644 --- a/cmd/bundle/open.go +++ b/cmd/bundle/open.go @@ -67,7 +67,7 @@ func newOpenCommand() *cobra.Command { return diags.Error() } - diags = bundle.Apply(ctx, b, phases.Initialize()) + diags = phases.Initialize(ctx, b) if err := diags.Error(); err != nil { return err } @@ -86,20 +86,20 @@ func newOpenCommand() *cobra.Command { noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist) if forcePull || noCache { - diags = bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.ApplySeq(ctx, b, terraform.StatePull(), terraform.Interpolate(), terraform.Write(), - )) + ) if err := diags.Error(); err != nil { return err } } - diags = bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.ApplySeq(ctx, b, terraform.Load(), mutator.InitializeURLs(), - )) + ) if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index ffb9c1b88..574ad1016 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -111,7 +111,7 @@ task or a Python wheel task, the second example applies. return diags.Error() } - diags = bundle.Apply(ctx, b, phases.Initialize()) + diags = phases.Initialize(ctx, b) if err := diags.Error(); err != nil { return err } @@ -121,12 +121,12 @@ task or a Python wheel task, the second example applies. return err } - diags = bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.ApplySeq(ctx, b, terraform.Interpolate(), terraform.Write(), terraform.StatePull(), terraform.Load(terraform.ErrorOnEmptyState), - )) + ) if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/summary.go b/cmd/bundle/summary.go index 2871c82ff..a0e93b78b 100644 --- a/cmd/bundle/summary.go +++ b/cmd/bundle/summary.go @@ -35,7 +35,7 @@ func newSummaryCommand() *cobra.Command { return diags.Error() } - diags = bundle.Apply(ctx, b, phases.Initialize()) + diags = phases.Initialize(ctx, b) if err := diags.Error(); err != nil { return err } @@ -49,18 +49,20 @@ func newSummaryCommand() *cobra.Command { noCache := errors.Is(stateFileErr, os.ErrNotExist) || errors.Is(configFileErr, os.ErrNotExist) if forcePull || noCache { - diags = bundle.Apply(ctx, b, bundle.Seq( + diags = bundle.ApplySeq(ctx, b, terraform.StatePull(), terraform.Interpolate(), terraform.Write(), - )) + ) if err := diags.Error(); err != nil { return err } } - diags = bundle.Apply(ctx, b, - bundle.Seq(terraform.Load(), mutator.InitializeURLs())) + diags = bundle.ApplySeq(ctx, b, + terraform.Load(), + mutator.InitializeURLs(), + ) if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 274bba0e0..3ada07b74 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -71,7 +71,7 @@ func newSyncCommand() *cobra.Command { } // Run initialize phase to make sure paths are set. 
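 		// (Initialize resolves the workspace root and sync paths that the
 		// sync command relies on.)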
- diags = bundle.Apply(ctx, b, phases.Initialize()) + diags = phases.Initialize(ctx, b) if err := diags.Error(); err != nil { return err } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index c45453af6..0ff9c7867 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -46,7 +46,7 @@ func newValidateCommand() *cobra.Command { } if !diags.HasError() { - diags = diags.Extend(bundle.Apply(ctx, b, phases.Initialize())) + diags = diags.Extend(phases.Initialize(ctx, b)) } if !diags.HasError() { diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 3037546c3..b40803707 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -74,16 +74,15 @@ func configureProfile(cmd *cobra.Command, b *bundle.Bundle) diag.Diagnostics { // configureBundle loads the bundle configuration and configures flag values, if any. func configureBundle(cmd *cobra.Command, b *bundle.Bundle) (*bundle.Bundle, diag.Diagnostics) { - var m bundle.Mutator - if target := getTarget(cmd); target == "" { - m = phases.LoadDefaultTarget() - } else { - m = phases.LoadNamedTarget(target) - } - // Load bundle and select target. ctx := cmd.Context() - diags := bundle.Apply(ctx, b, m) + var diags diag.Diagnostics + if target := getTarget(cmd); target == "" { + diags = phases.LoadDefaultTarget(ctx, b) + } else { + diags = phases.LoadNamedTarget(ctx, b, target) + } + if diags.HasError() { return b, diags } @@ -159,7 +158,7 @@ func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]s } // Load bundle but don't select a target (we're completing those). - diags := bundle.Apply(ctx, b, phases.Load()) + diags := phases.Load(ctx, b) if err := diags.Error(); err != nil { cobra.CompErrorln(err.Error()) return nil, cobra.ShellCompDirectiveError diff --git a/cmd/root/root.go b/cmd/root/root.go index f20401e73..eb2c66625 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -8,6 +8,7 @@ import ( "log/slog" "os" "runtime" + "runtime/debug" "slices" "strings" "time" @@ -102,14 +103,39 @@ func flagErrorFunc(c *cobra.Command, err error) error { // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. -func Execute(ctx context.Context, cmd *cobra.Command) error { - // TODO: deferred panic recovery +func Execute(ctx context.Context, cmd *cobra.Command) (err error) { + defer func() { + r := recover() + + // No panic. Return normally. + if r == nil { + return + } + + version := build.GetInfo().Version + trace := debug.Stack() + + // Set the error so that the CLI exits with a non-zero exit code. + err = fmt.Errorf("panic: %v", r) + + fmt.Fprintf(cmd.ErrOrStderr(), `The Databricks CLI unexpectedly had a fatal error. 
+Please report this issue to Databricks in the form of a GitHub issue at: +https://github.com/databricks/cli + +CLI Version: %s + +Panic Payload: %v + +Stack Trace: +%s`, version, r, string(trace)) + }() + ctx = telemetry.WithNewLogger(ctx) ctx = dbr.DetectRuntime(ctx) startTime := time.Now() // Run the command - cmd, err := cmd.ExecuteContextC(ctx) + cmd, err = cmd.ExecuteContextC(ctx) if err != nil && !errors.Is(err, ErrAlreadyPrinted) { // If cmdio logger initialization succeeds, then this function logs with the // initialized cmdio logger, otherwise with the default cmdio logger diff --git a/cmd/selftest/panic.go b/cmd/selftest/panic.go new file mode 100644 index 000000000..58d8b24e5 --- /dev/null +++ b/cmd/selftest/panic.go @@ -0,0 +1,12 @@ +package selftest + +import "github.com/spf13/cobra" + +func newPanic() *cobra.Command { + return &cobra.Command{ + Use: "panic", + Run: func(cmd *cobra.Command, args []string) { + panic("the databricks selftest panic command always panics") + }, + } +} diff --git a/cmd/selftest/selftest.go b/cmd/selftest/selftest.go index 64cc562d5..b3e8541d4 100644 --- a/cmd/selftest/selftest.go +++ b/cmd/selftest/selftest.go @@ -12,5 +12,6 @@ func New() *cobra.Command { } cmd.AddCommand(newSendTelemetry()) + cmd.AddCommand(newPanic()) return cmd } diff --git a/integration/bundle/artifacts_test.go b/integration/bundle/artifacts_test.go index 94b96899e..125b5febd 100644 --- a/integration/bundle/artifacts_test.go +++ b/integration/bundle/artifacts_test.go @@ -80,7 +80,7 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { }, } - diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload())) + diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload()) require.NoError(t, diags.Error()) // The remote path attribute on the artifact file should have been set. @@ -144,7 +144,7 @@ func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) { }, } - diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload())) + diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload()) require.NoError(t, diags.Error()) // The remote path attribute on the artifact file should have been set. @@ -213,7 +213,7 @@ func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) { }, } - diags := bundle.Apply(ctx, b, bundle.Seq(libraries.ExpandGlobReferences(), libraries.Upload())) + diags := bundle.ApplySeq(ctx, b, libraries.ExpandGlobReferences(), libraries.Upload()) require.NoError(t, diags.Error()) // The remote path attribute on the artifact file should have been set. 
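Note: the `Execute` rewrite above turns panics into ordinary errors by pairing a named return value with a deferred `recover()`. Below is a minimal, self-contained sketch of that same pattern; the `run` function and its panic message are illustrative only, not code from the CLI.

```go
package main

import (
	"fmt"
	"runtime/debug"
)

// run converts a panic into a regular error: the deferred closure calls
// recover() and overwrites the named return value err.
func run() (err error) {
	defer func() {
		if r := recover(); r != nil {
			// Without a named return value there would be nothing to assign
			// to here, and the failure would be invisible to the caller.
			err = fmt.Errorf("panic: %v", r)
			fmt.Printf("Stack Trace:\n%s", debug.Stack())
		}
	}()

	panic("boom")
}

func main() {
	fmt.Println(run()) // prints the stack trace, then "panic: boom"
}
```

Because `err` is a named return value, the deferred closure can overwrite it after the panic unwinds; an unnamed return would leave the caller with no way to observe the failure.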
diff --git a/integration/bundle/spark_jar_test.go b/integration/bundle/spark_jar_test.go index cbdf5a00c..fc2fe0d0f 100644 --- a/integration/bundle/spark_jar_test.go +++ b/integration/bundle/spark_jar_test.go @@ -11,6 +11,50 @@ import ( "github.com/stretchr/testify/require" ) +// sparkJarTestCase defines a Databricks runtime version and a local Java version requirement +type sparkJarTestCase struct { + name string // Test name + runtimeVersion string // The Spark runtime version to test + requiredJavaVersion string // Java version that can compile jar to pass this test +} + +// runSparkJarTests runs a set of test cases with appropriate Java version checks +// testRunner is the function that runs the actual test with the runtime version +func runSparkJarTests(t *testing.T, testCases []sparkJarTestCase, testRunner func(t *testing.T, runtimeVersion string)) { + t.Helper() + + testCanRun := make(map[string]bool) + atLeastOneCanRun := false + for _, tc := range testCases { + if testutil.HasJDK(t, context.Background(), tc.requiredJavaVersion) { + testCanRun[tc.name] = true + atLeastOneCanRun = true + continue + } + testCanRun[tc.name] = false + } + + if !atLeastOneCanRun { + t.Fatal("At least one test is required to pass. All tests were skipped because no compatible Java version was found.") + } + + // Run the tests that can run + for _, tc := range testCases { + tc := tc // Capture range variable for goroutine + canRun := testCanRun[tc.name] + + t.Run(tc.name, func(t *testing.T) { + if !canRun { + t.Skipf("Skipping %s: requires Java version %v", tc.name, tc.requiredJavaVersion) + return + } + + t.Parallel() + testRunner(t, tc.runtimeVersion) + }) + } +} + func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) { nodeTypeId := testutil.GetCloud(t).NodeTypeID() tmpDir := t.TempDir() @@ -54,46 +98,60 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) { } func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) { - testutil.RequireJDK(t, context.Background(), "1.8.0") - // Failure on earlier DBR versions: // // JAR installation from Volumes is supported on UC Clusters with DBR >= 13.3. 
// Denied library is Jar(/Volumes/main/test-schema-ldgaklhcahlg/my-volume/.internal/PrintArgs.jar) // - versions := []string{ - "13.3.x-scala2.12", // 13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12) - "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12) - "15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12) - } - - for _, version := range versions { - t.Run(version, func(t *testing.T) { - t.Parallel() - runSparkJarTestFromVolume(t, version) - }) + testCases := []sparkJarTestCase{ + { + name: "Databricks Runtime 13.3 LTS", + runtimeVersion: "13.3.x-scala2.12", // 13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12) + requiredJavaVersion: "1.8.0", // Only JDK 8 is supported + }, + { + name: "Databricks Runtime 14.3 LTS", + runtimeVersion: "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12) + requiredJavaVersion: "1.8.0", // Only JDK 8 is supported + }, + { + name: "Databricks Runtime 15.4 LTS", + runtimeVersion: "15.4.x-scala2.12", // 15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12) + requiredJavaVersion: "1.8.0", // Only JDK 8 is supported + }, + { + name: "Databricks Runtime 16.2", + runtimeVersion: "16.2.x-scala2.12", // 16.2 (includes Apache Spark 3.5.2, Scala 2.12) + requiredJavaVersion: "11.0", // Can run jars compiled by Java 11 + }, } + runSparkJarTests(t, testCases, runSparkJarTestFromVolume) } func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) { - testutil.RequireJDK(t, context.Background(), "1.8.0") - // Failure on earlier DBR versions: // // Library from /Workspace is not allowed on this cluster. // Please switch to using DBR 14.1+ No Isolation Shared or DBR 13.1+ Shared cluster or 13.2+ Assigned cluster to use /Workspace libraries. // - versions := []string{ - "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12) - "15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12) - } - - for _, version := range versions { - t.Run(version, func(t *testing.T) { - t.Parallel() - runSparkJarTestFromWorkspace(t, version) - }) + testCases := []sparkJarTestCase{ + { + name: "Databricks Runtime 14.3 LTS", + runtimeVersion: "14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12) + requiredJavaVersion: "1.8.0", // Only JDK 8 is supported + }, + { + name: "Databricks Runtime 15.4 LTS", + runtimeVersion: "15.4.x-scala2.12", // 15.4 LTS (includes Apache Spark 3.5.0, Scala 2.12) + requiredJavaVersion: "1.8.0", // Only JDK 8 is supported + }, + { + name: "Databricks Runtime 16.2", + runtimeVersion: "16.2.x-scala2.12", // 16.2 (includes Apache Spark 3.5.2, Scala 2.12) + requiredJavaVersion: "11.0", // Can run jars compiled by Java 11 + }, } + runSparkJarTests(t, testCases, runSparkJarTestFromWorkspace) } diff --git a/internal/testutil/jdk.go b/internal/testutil/jdk.go index 60fa439db..6c3277719 100644 --- a/internal/testutil/jdk.go +++ b/internal/testutil/jdk.go @@ -1,23 +1,44 @@ package testutil import ( - "bytes" "context" "os/exec" "strings" - - "github.com/stretchr/testify/require" ) -func RequireJDK(t TestingT, ctx context.Context, version string) { - var stderr bytes.Buffer +// HasJDK checks if the specified Java version is available in the system. +// It returns true if the required JDK version is present, false otherwise. +// This is a non-failing variant of RequireJDK. 
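+// Matching is a plain substring check against the quoted version string, so a
+// prefix such as "11.0" matches "11.0.26" and "1.8.0" matches "1.8.0_442".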
+// +// Example output of `java -version` in eclipse-temurin:8: +// openjdk version "1.8.0_442" +// OpenJDK Runtime Environment (Temurin)(build 1.8.0_442-b06) +// OpenJDK 64-Bit Server VM (Temurin)(build 25.442-b06, mixed mode) +// +// Example output of `java -version` in java11 (homebrew): +// openjdk version "11.0.26" 2025-01-21 +// OpenJDK Runtime Environment Homebrew (build 11.0.26+0) +// OpenJDK 64-Bit Server VM Homebrew (build 11.0.26+0, mixed mode) +func HasJDK(t TestingT, ctx context.Context, version string) bool { + t.Helper() - cmd := exec.Command("javac", "-version") - cmd.Stderr = &stderr - err := cmd.Run() - require.NoError(t, err, "Unable to run javac -version") + // Try to execute "java -version" command + cmd := exec.CommandContext(ctx, "java", "-version") + output, err := cmd.CombinedOutput() + if err != nil { + t.Logf("Failed to execute java -version: %v", err) + return false + } - // Get the first line of the output - line := strings.Split(stderr.String(), "\n")[0] - require.Contains(t, line, version, "Expected JDK version %s, got %s", version, line) + javaVersionOutput := string(output) + + // Check if the output contains the expected version + expectedVersionString := "version \"" + version + if strings.Contains(javaVersionOutput, expectedVersionString) { + t.Logf("Detected JDK version %s", version) + return true + } + + t.Logf("Required JDK version %s not found, instead got: %s", version, javaVersionOutput) + return false } diff --git a/libs/auth/env.go b/libs/auth/env.go index 66744d6a1..db91c503b 100644 --- a/libs/auth/env.go +++ b/libs/auth/env.go @@ -38,3 +38,17 @@ func EnvVars() []string { return out } + +func GetEnvFor(name string) (string, bool) { + for _, attr := range config.ConfigAttributes { + if attr.Name != name { + continue + } + if len(attr.EnvVars) == 0 { + return "", false + } + return attr.EnvVars[0], true + } + + return "", false +} diff --git a/libs/auth/env_test.go b/libs/auth/env_test.go index b147b74bb..b0ab1fbaf 100644 --- a/libs/auth/env_test.go +++ b/libs/auth/env_test.go @@ -77,3 +77,42 @@ func TestAuthEnvVars(t *testing.T) { out := EnvVars() assert.Equal(t, expected, out) } + +func TestGetEnvFor(t *testing.T) { + tcases := []struct { + name string + expected string + }{ + // Generic attributes. + {"host", "DATABRICKS_HOST"}, + {"profile", "DATABRICKS_CONFIG_PROFILE"}, + {"auth_type", "DATABRICKS_AUTH_TYPE"}, + {"metadata_service_url", "DATABRICKS_METADATA_SERVICE_URL"}, + + // OAuth specific attributes. + {"client_id", "DATABRICKS_CLIENT_ID"}, + + // Google specific attributes. + {"google_service_account", "DATABRICKS_GOOGLE_SERVICE_ACCOUNT"}, + + // Azure specific attributes. 
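+		// (The ARM_* names below match the environment variables conventionally
+		// used by Terraform's Azure provider.)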
+ {"azure_workspace_resource_id", "DATABRICKS_AZURE_RESOURCE_ID"}, + {"azure_use_msi", "ARM_USE_MSI"}, + {"azure_client_id", "ARM_CLIENT_ID"}, + {"azure_tenant_id", "ARM_TENANT_ID"}, + {"azure_environment", "ARM_ENVIRONMENT"}, + {"azure_login_app_id", "DATABRICKS_AZURE_LOGIN_APP_ID"}, + } + + for _, tcase := range tcases { + t.Run(tcase.name, func(t *testing.T) { + out, ok := GetEnvFor(tcase.name) + assert.True(t, ok) + assert.Equal(t, tcase.expected, out) + }) + } + + out, ok := GetEnvFor("notfound") + assert.False(t, ok) + assert.Empty(t, out) +} diff --git a/libs/dyn/dynvar/ref.go b/libs/dyn/dynvar/ref.go index ba397267a..21ec00fda 100644 --- a/libs/dyn/dynvar/ref.go +++ b/libs/dyn/dynvar/ref.go @@ -76,6 +76,10 @@ func IsPureVariableReference(s string) bool { return len(s) > 0 && re.FindString(s) == s } +func ContainsVariableReference(s string) bool { + return re.MatchString(s) +} + // If s is a pure variable reference, this function returns the corresponding // dyn.Path. Otherwise, it returns false. func PureReferenceToPath(s string) (dyn.Path, bool) { diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index f9588edd1..97030324b 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -78,7 +78,7 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri b, err := bundle.Load(ctx, filepath.Join(tempDir, "my_project")) require.NoError(t, err) - diags := bundle.Apply(ctx, b, phases.LoadNamedTarget(target)) + diags := phases.LoadNamedTarget(ctx, b, target) require.NoError(t, diags.Error()) // Apply initialize / validation mutators @@ -93,14 +93,12 @@ func assertBuiltinTemplateValid(t *testing.T, template string, settings map[stri b.Tagging = tags.ForCloud(w.Config) b.WorkspaceClient() - diags = bundle.Apply(ctx, b, bundle.Seq( - phases.Initialize(), - )) + diags = phases.Initialize(ctx, b) require.NoError(t, diags.Error()) // Apply build mutator if build { - diags = bundle.Apply(ctx, b, phases.Build()) + diags = phases.Build(ctx, b) require.NoError(t, diags.Error()) } } diff --git a/libs/testserver/server.go b/libs/testserver/server.go index a10ddf4d8..4aa2d2dc0 100644 --- a/libs/testserver/server.go +++ b/libs/testserver/server.go @@ -2,6 +2,7 @@ package testserver import ( "encoding/json" + "fmt" "io" "net/http" "net/http/httptest" @@ -191,24 +192,23 @@ func New(t testutil.TestingT) *Server { // Set up the not found handler as fallback router.NotFoundHandler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { pattern := r.Method + " " + r.URL.Path + bodyBytes, err := io.ReadAll(r.Body) + var body string + if err != nil { + body = fmt.Sprintf("failed to read the body: %s", err) + } else { + body = fmt.Sprintf("[%d bytes] %s", len(bodyBytes), bodyBytes) + } - t.Errorf(` + t.Errorf(`No handler for URL: %s +Body: %s ----------------------------------------- -No stub found for pattern: %s - -To stub a response for this request, you can add -the following to test.toml: +For acceptance tests, add this to test.toml: [[Server]] Pattern = %q -Response.Body = ''' - -''' -Response.StatusCode = ----------------------------------------- - - -`, pattern, pattern) +Response.Body = '' +# Response.StatusCode = +`, r.URL, body, pattern) w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusNotImplemented)