From b2c87ae5d047220ef778417664ec4fa1f7cbd50f Mon Sep 17 00:00:00 2001
From: Denis Bilenko
Date: Fri, 14 Mar 2025 09:52:20 +0100
Subject: [PATCH] acc: Simplify integration_whl tests (#2479)

## Changes

Instead of initializing a full bundle template per test, keep only
databricks.yml as a template and render it with envsubst. The optional
python_wheel_wrapper setting moves into a standalone config file that
tests pull in through the $EXTRA_CONFIG include (empty.yml when the
wrapper is off). A short sketch of the envsubst flow follows the diff.
---
 .../base/{template => }/databricks.yml.tmpl   | 16 +++++-----
 .../base/databricks_template_schema.json      | 29 -------------------
 .../bundle/integration_whl/base/empty.yml     |  0
 .../__init__.py                               |  0
 .../__main__.py                               |  0
 .../bundle/integration_whl/base/output.txt    | 20 ++++---------
 .../base/python_wheel_wrapper.yml             |  2 ++
 acceptance/bundle/integration_whl/base/script |  7 ++---
 .../bundle/integration_whl/base/setup.py      | 15 ++++++++++
 .../base/template/setup.py.tmpl               | 15 ----------
 .../integration_whl/custom_params/output.txt  | 20 ++++---------
 .../integration_whl/custom_params/script      |  8 ++---
 .../{template => }/databricks.yml.tmpl        | 10 +++----
 .../databricks_template_schema.json           | 25 ----------------
 .../__init__.py                               |  0
 .../__main__.py                               |  0
 .../interactive_cluster/output.txt            | 11 -------
 .../interactive_cluster/script                |  6 +---
 .../interactive_cluster/setup.py              | 15 ++++++++++
 .../template/setup.py.tmpl                    | 15 ----------
 acceptance/bundle/integration_whl/test.toml   |  3 +-
 .../bundle/integration_whl/wrapper/output.txt | 23 ++++-----------
 .../bundle/integration_whl/wrapper/script     |  9 +++---
 .../wrapper_custom_params/output.txt          | 23 ++++-----------
 .../wrapper_custom_params/script              |  9 +++---
 25 files changed, 80 insertions(+), 201 deletions(-)
 rename acceptance/bundle/integration_whl/base/{template => }/databricks.yml.tmpl (56%)
 delete mode 100644 acceptance/bundle/integration_whl/base/databricks_template_schema.json
 create mode 100644 acceptance/bundle/integration_whl/base/empty.yml
 rename acceptance/bundle/integration_whl/base/{template/{{.project_name}} => my_test_code}/__init__.py (100%)
 rename acceptance/bundle/integration_whl/base/{template/{{.project_name}} => my_test_code}/__main__.py (100%)
 create mode 100644 acceptance/bundle/integration_whl/base/python_wheel_wrapper.yml
 create mode 100644 acceptance/bundle/integration_whl/base/setup.py
 delete mode 100644 acceptance/bundle/integration_whl/base/template/setup.py.tmpl
 rename acceptance/bundle/integration_whl/interactive_cluster/{template => }/databricks.yml.tmpl (68%)
 delete mode 100644 acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json
 rename acceptance/bundle/integration_whl/interactive_cluster/{template/{{.project_name}} => my_test_code}/__init__.py (100%)
 rename acceptance/bundle/integration_whl/interactive_cluster/{template/{{.project_name}} => my_test_code}/__main__.py (100%)
 create mode 100644 acceptance/bundle/integration_whl/interactive_cluster/setup.py
 delete mode 100644 acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl

diff --git a/acceptance/bundle/integration_whl/base/template/databricks.yml.tmpl b/acceptance/bundle/integration_whl/base/databricks.yml.tmpl
similarity index 56%
rename from acceptance/bundle/integration_whl/base/template/databricks.yml.tmpl
rename to acceptance/bundle/integration_whl/base/databricks.yml.tmpl
index 30b0a5eae..7298e666a 100644
--- a/acceptance/bundle/integration_whl/base/template/databricks.yml.tmpl
+++ b/acceptance/bundle/integration_whl/base/databricks.yml.tmpl
@@ -2,25 +2,23 @@ bundle:
   name: wheel-task
 
 workspace:
-  root_path: "~/.bundle/{{.unique_id}}"
+  root_path: "~/.bundle/$UNIQUE_NAME"
 
-{{if .python_wheel_wrapper}}
-experimental:
-  python_wheel_wrapper: 
true -{{end}} +include: + - $EXTRA_CONFIG resources: jobs: some_other_job: - name: "[${bundle.target}] Test Wheel Job {{.unique_id}}" + name: "[${bundle.target}] Test Wheel Job $UNIQUE_NAME" tasks: - task_key: TestTask new_cluster: num_workers: 1 - spark_version: "{{.spark_version}}" - node_type_id: "{{.node_type_id}}" + spark_version: $DEFAULT_SPARK_VERSION + node_type_id: $NODE_TYPE_ID data_security_mode: USER_ISOLATION - instance_pool_id: "{{.instance_pool_id}}" + instance_pool_id: $TEST_INSTANCE_POOL_ID python_wheel_task: package_name: my_test_code entry_point: run diff --git a/acceptance/bundle/integration_whl/base/databricks_template_schema.json b/acceptance/bundle/integration_whl/base/databricks_template_schema.json deleted file mode 100644 index c4a74df07..000000000 --- a/acceptance/bundle/integration_whl/base/databricks_template_schema.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "properties": { - "project_name": { - "type": "string", - "default": "my_test_code", - "description": "Unique name for this project" - }, - "spark_version": { - "type": "string", - "description": "Spark version used for job cluster" - }, - "node_type_id": { - "type": "string", - "description": "Node type id for job cluster" - }, - "unique_id": { - "type": "string", - "description": "Unique ID for job name" - }, - "python_wheel_wrapper": { - "type": "boolean", - "description": "Whether or not to enable python wheel wrapper" - }, - "instance_pool_id": { - "type": "string", - "description": "Instance pool id for job cluster" - } - } -} diff --git a/acceptance/bundle/integration_whl/base/empty.yml b/acceptance/bundle/integration_whl/base/empty.yml new file mode 100644 index 000000000..e69de29bb diff --git a/acceptance/bundle/integration_whl/base/template/{{.project_name}}/__init__.py b/acceptance/bundle/integration_whl/base/my_test_code/__init__.py similarity index 100% rename from acceptance/bundle/integration_whl/base/template/{{.project_name}}/__init__.py rename to acceptance/bundle/integration_whl/base/my_test_code/__init__.py diff --git a/acceptance/bundle/integration_whl/base/template/{{.project_name}}/__main__.py b/acceptance/bundle/integration_whl/base/my_test_code/__main__.py similarity index 100% rename from acceptance/bundle/integration_whl/base/template/{{.project_name}}/__main__.py rename to acceptance/bundle/integration_whl/base/my_test_code/__main__.py diff --git a/acceptance/bundle/integration_whl/base/output.txt b/acceptance/bundle/integration_whl/base/output.txt index d42daeaf3..34ae35e40 100644 --- a/acceptance/bundle/integration_whl/base/output.txt +++ b/acceptance/bundle/integration_whl/base/output.txt @@ -1,15 +1,4 @@ ->>> cat input.json -{ - "project_name": "my_test_code", - "spark_version": "13.3.x-snapshot-scala2.12", - "node_type_id": "[NODE_TYPE_ID]", - "unique_id": "[UNIQUE_NAME]", - "python_wheel_wrapper": false, - "instance_pool_id": "[TEST_INSTANCE_POOL_ID]" -} -✨ Successfully initialized template - >>> cat databricks.yml bundle: name: wheel-task @@ -17,7 +6,8 @@ bundle: workspace: root_path: "~/.bundle/[UNIQUE_NAME]" - +include: + - empty.yml resources: jobs: @@ -27,10 +17,10 @@ resources: - task_key: TestTask new_cluster: num_workers: 1 - spark_version: "13.3.x-snapshot-scala2.12" - node_type_id: "[NODE_TYPE_ID]" + spark_version: 13.3.x-snapshot-scala2.12 + node_type_id: [NODE_TYPE_ID] data_security_mode: USER_ISOLATION - instance_pool_id: "[TEST_INSTANCE_POOL_ID]" + instance_pool_id: [TEST_INSTANCE_POOL_ID] python_wheel_task: package_name: my_test_code entry_point: run diff --git 
a/acceptance/bundle/integration_whl/base/python_wheel_wrapper.yml b/acceptance/bundle/integration_whl/base/python_wheel_wrapper.yml new file mode 100644 index 000000000..cc4b9c51c --- /dev/null +++ b/acceptance/bundle/integration_whl/base/python_wheel_wrapper.yml @@ -0,0 +1,2 @@ +experimental: + python_wheel_wrapper: true diff --git a/acceptance/bundle/integration_whl/base/script b/acceptance/bundle/integration_whl/base/script index 19418f5d4..4e5e5f360 100644 --- a/acceptance/bundle/integration_whl/base/script +++ b/acceptance/bundle/integration_whl/base/script @@ -1,8 +1,5 @@ -export SPARK_VERSION=$DEFAULT_SPARK_VERSION -export PYTHON_WHEEL_WRAPPER=false -envsubst < input.json.tmpl > input.json -trace cat input.json -$CLI bundle init . --config-file input.json +export EXTRA_CONFIG=empty.yml +envsubst < databricks.yml.tmpl > databricks.yml trace cat databricks.yml trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT trace $CLI bundle deploy diff --git a/acceptance/bundle/integration_whl/base/setup.py b/acceptance/bundle/integration_whl/base/setup.py new file mode 100644 index 000000000..8b48a92b4 --- /dev/null +++ b/acceptance/bundle/integration_whl/base/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my example wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/acceptance/bundle/integration_whl/base/template/setup.py.tmpl b/acceptance/bundle/integration_whl/base/template/setup.py.tmpl deleted file mode 100644 index b528657b1..000000000 --- a/acceptance/bundle/integration_whl/base/template/setup.py.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -from setuptools import setup, find_packages - -import {{.project_name}} - -setup( - name="{{.project_name}}", - version={{.project_name}}.__version__, - author={{.project_name}}.__author__, - url="https://databricks.com", - author_email="john.doe@databricks.com", - description="my example wheel", - packages=find_packages(include=["{{.project_name}}"]), - entry_points={"group1": "run={{.project_name}}.__main__:main"}, - install_requires=["setuptools"], -) diff --git a/acceptance/bundle/integration_whl/custom_params/output.txt b/acceptance/bundle/integration_whl/custom_params/output.txt index 19c1d87ce..5ca3027f1 100644 --- a/acceptance/bundle/integration_whl/custom_params/output.txt +++ b/acceptance/bundle/integration_whl/custom_params/output.txt @@ -1,15 +1,4 @@ ->>> cat input.json -{ - "project_name": "my_test_code", - "spark_version": "13.3.x-snapshot-scala2.12", - "node_type_id": "[NODE_TYPE_ID]", - "unique_id": "[UNIQUE_NAME]", - "python_wheel_wrapper": false, - "instance_pool_id": "[TEST_INSTANCE_POOL_ID]" -} -✨ Successfully initialized template - >>> cat databricks.yml bundle: name: wheel-task @@ -17,7 +6,8 @@ bundle: workspace: root_path: "~/.bundle/[UNIQUE_NAME]" - +include: + - empty.yml resources: jobs: @@ -27,10 +17,10 @@ resources: - task_key: TestTask new_cluster: num_workers: 1 - spark_version: "13.3.x-snapshot-scala2.12" - node_type_id: "[NODE_TYPE_ID]" + spark_version: 13.3.x-snapshot-scala2.12 + node_type_id: [NODE_TYPE_ID] data_security_mode: USER_ISOLATION - instance_pool_id: "[TEST_INSTANCE_POOL_ID]" + instance_pool_id: [TEST_INSTANCE_POOL_ID] python_wheel_task: package_name: 
my_test_code entry_point: run diff --git a/acceptance/bundle/integration_whl/custom_params/script b/acceptance/bundle/integration_whl/custom_params/script index da7ba68f8..246c90ae0 100644 --- a/acceptance/bundle/integration_whl/custom_params/script +++ b/acceptance/bundle/integration_whl/custom_params/script @@ -1,8 +1,6 @@ -export SPARK_VERSION=$DEFAULT_SPARK_VERSION -export PYTHON_WHEEL_WRAPPER=false -envsubst < $TESTDIR/../base/input.json.tmpl > input.json -trace cat input.json -$CLI bundle init $TESTDIR/../base --config-file input.json +export EXTRA_CONFIG=empty.yml +envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml +cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} . trace cat databricks.yml trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT trace $CLI bundle deploy diff --git a/acceptance/bundle/integration_whl/interactive_cluster/template/databricks.yml.tmpl b/acceptance/bundle/integration_whl/interactive_cluster/databricks.yml.tmpl similarity index 68% rename from acceptance/bundle/integration_whl/interactive_cluster/template/databricks.yml.tmpl rename to acceptance/bundle/integration_whl/interactive_cluster/databricks.yml.tmpl index bb2d3d7d2..85e44eb6a 100644 --- a/acceptance/bundle/integration_whl/interactive_cluster/template/databricks.yml.tmpl +++ b/acceptance/bundle/integration_whl/interactive_cluster/databricks.yml.tmpl @@ -2,20 +2,20 @@ bundle: name: wheel-task workspace: - root_path: "~/.bundle/{{.unique_id}}" + root_path: "~/.bundle/$UNIQUE_NAME" resources: clusters: test_cluster: - cluster_name: "test-cluster-{{.unique_id}}" - spark_version: "{{.spark_version}}" - node_type_id: "{{.node_type_id}}" + cluster_name: "test-cluster-$UNIQUE_NAME" + spark_version: "$DEFAULT_SPARK_VERSION" + node_type_id: "$NODE_TYPE_ID" num_workers: 1 data_security_mode: USER_ISOLATION jobs: some_other_job: - name: "[${bundle.target}] Test Wheel Job {{.unique_id}}" + name: "[${bundle.target}] Test Wheel Job $UNIQUE_NAME" tasks: - task_key: TestTask existing_cluster_id: "${resources.clusters.test_cluster.cluster_id}" diff --git a/acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json b/acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json deleted file mode 100644 index 621dff6aa..000000000 --- a/acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "properties": { - "project_name": { - "type": "string", - "default": "my_test_code", - "description": "Unique name for this project" - }, - "spark_version": { - "type": "string", - "description": "Spark version used for job cluster" - }, - "node_type_id": { - "type": "string", - "description": "Node type id for job cluster" - }, - "unique_id": { - "type": "string", - "description": "Unique ID for job name" - }, - "instance_pool_id": { - "type": "string", - "description": "Instance pool id for job cluster" - } - } -} diff --git a/acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__init__.py b/acceptance/bundle/integration_whl/interactive_cluster/my_test_code/__init__.py similarity index 100% rename from acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__init__.py rename to acceptance/bundle/integration_whl/interactive_cluster/my_test_code/__init__.py diff --git a/acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__main__.py 
b/acceptance/bundle/integration_whl/interactive_cluster/my_test_code/__main__.py similarity index 100% rename from acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__main__.py rename to acceptance/bundle/integration_whl/interactive_cluster/my_test_code/__main__.py diff --git a/acceptance/bundle/integration_whl/interactive_cluster/output.txt b/acceptance/bundle/integration_whl/interactive_cluster/output.txt index 77b99ace2..be1d9e375 100644 --- a/acceptance/bundle/integration_whl/interactive_cluster/output.txt +++ b/acceptance/bundle/integration_whl/interactive_cluster/output.txt @@ -1,15 +1,4 @@ ->>> cat input.json -{ - "project_name": "my_test_code", - "spark_version": "13.3.x-snapshot-scala2.12", - "node_type_id": "[NODE_TYPE_ID]", - "unique_id": "[UNIQUE_NAME]", - "python_wheel_wrapper": false, - "instance_pool_id": "[TEST_INSTANCE_POOL_ID]" -} -✨ Successfully initialized template - >>> cat databricks.yml bundle: name: wheel-task diff --git a/acceptance/bundle/integration_whl/interactive_cluster/script b/acceptance/bundle/integration_whl/interactive_cluster/script index 65b4ee2d2..84498a7c9 100644 --- a/acceptance/bundle/integration_whl/interactive_cluster/script +++ b/acceptance/bundle/integration_whl/interactive_cluster/script @@ -1,8 +1,4 @@ -export SPARK_VERSION=$DEFAULT_SPARK_VERSION -export PYTHON_WHEEL_WRAPPER=false -envsubst < $TESTDIR/../base/input.json.tmpl > input.json -trace cat input.json -$CLI bundle init . --config-file input.json +envsubst < databricks.yml.tmpl > databricks.yml trace cat databricks.yml trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT trace $CLI bundle deploy diff --git a/acceptance/bundle/integration_whl/interactive_cluster/setup.py b/acceptance/bundle/integration_whl/interactive_cluster/setup.py new file mode 100644 index 000000000..8b48a92b4 --- /dev/null +++ b/acceptance/bundle/integration_whl/interactive_cluster/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my example wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl b/acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl deleted file mode 100644 index b528657b1..000000000 --- a/acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl +++ /dev/null @@ -1,15 +0,0 @@ -from setuptools import setup, find_packages - -import {{.project_name}} - -setup( - name="{{.project_name}}", - version={{.project_name}}.__version__, - author={{.project_name}}.__author__, - url="https://databricks.com", - author_email="john.doe@databricks.com", - description="my example wheel", - packages=find_packages(include=["{{.project_name}}"]), - entry_points={"group1": "run={{.project_name}}.__main__:main"}, - install_requires=["setuptools"], -) diff --git a/acceptance/bundle/integration_whl/test.toml b/acceptance/bundle/integration_whl/test.toml index 2e53f7b53..82e4d48d6 100644 --- a/acceptance/bundle/integration_whl/test.toml +++ b/acceptance/bundle/integration_whl/test.toml @@ -7,8 +7,9 @@ Ignore = [ "my_test_code", "my_test_code.egg-info", "setup.py", - "input.json", "databricks.yml", + 
"python_wheel_wrapper.yml", + "empty.yml", ] [[Repls]] diff --git a/acceptance/bundle/integration_whl/wrapper/output.txt b/acceptance/bundle/integration_whl/wrapper/output.txt index 06e97bb27..287cefed1 100644 --- a/acceptance/bundle/integration_whl/wrapper/output.txt +++ b/acceptance/bundle/integration_whl/wrapper/output.txt @@ -1,15 +1,4 @@ ->>> cat input.json -{ - "project_name": "my_test_code", - "spark_version": "12.2.x-scala2.12", - "node_type_id": "[NODE_TYPE_ID]", - "unique_id": "[UNIQUE_NAME]", - "python_wheel_wrapper": true, - "instance_pool_id": "[TEST_INSTANCE_POOL_ID]" -} -✨ Successfully initialized template - >>> cat databricks.yml bundle: name: wheel-task @@ -17,10 +6,8 @@ bundle: workspace: root_path: "~/.bundle/[UNIQUE_NAME]" - -experimental: - python_wheel_wrapper: true - +include: + - python_wheel_wrapper.yml resources: jobs: @@ -30,10 +17,10 @@ resources: - task_key: TestTask new_cluster: num_workers: 1 - spark_version: "12.2.x-scala2.12" - node_type_id: "[NODE_TYPE_ID]" + spark_version: 12.2.x-scala2.12 + node_type_id: [NODE_TYPE_ID] data_security_mode: USER_ISOLATION - instance_pool_id: "[TEST_INSTANCE_POOL_ID]" + instance_pool_id: [TEST_INSTANCE_POOL_ID] python_wheel_task: package_name: my_test_code entry_point: run diff --git a/acceptance/bundle/integration_whl/wrapper/script b/acceptance/bundle/integration_whl/wrapper/script index 3e6afcfaf..68ca611ca 100644 --- a/acceptance/bundle/integration_whl/wrapper/script +++ b/acceptance/bundle/integration_whl/wrapper/script @@ -2,11 +2,10 @@ # But before users used older DBRs and python wheel tasks but installed it from DBFS. # We still want to support older DBRs and did the trampoline workaround (https://github.com/databricks/cli/pull/635) # Hence this is to test that python wheel tasks in DABs are working for older DBRs -export SPARK_VERSION=12.2.x-scala2.12 -export PYTHON_WHEEL_WRAPPER=true -envsubst < $TESTDIR/../base/input.json.tmpl > input.json -trace cat input.json -$CLI bundle init $TESTDIR/../base --config-file input.json +export DEFAULT_SPARK_VERSION=12.2.x-scala2.12 +export EXTRA_CONFIG=python_wheel_wrapper.yml +envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml +cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} . 
trace cat databricks.yml trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT trace $CLI bundle deploy diff --git a/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt b/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt index c17ba0f8b..529e3f44c 100644 --- a/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt +++ b/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt @@ -1,15 +1,4 @@ ->>> cat input.json -{ - "project_name": "my_test_code", - "spark_version": "12.2.x-scala2.12", - "node_type_id": "[NODE_TYPE_ID]", - "unique_id": "[UNIQUE_NAME]", - "python_wheel_wrapper": true, - "instance_pool_id": "[TEST_INSTANCE_POOL_ID]" -} -✨ Successfully initialized template - >>> cat databricks.yml bundle: name: wheel-task @@ -17,10 +6,8 @@ bundle: workspace: root_path: "~/.bundle/[UNIQUE_NAME]" - -experimental: - python_wheel_wrapper: true - +include: + - python_wheel_wrapper.yml resources: jobs: @@ -30,10 +17,10 @@ resources: - task_key: TestTask new_cluster: num_workers: 1 - spark_version: "12.2.x-scala2.12" - node_type_id: "[NODE_TYPE_ID]" + spark_version: 12.2.x-scala2.12 + node_type_id: [NODE_TYPE_ID] data_security_mode: USER_ISOLATION - instance_pool_id: "[TEST_INSTANCE_POOL_ID]" + instance_pool_id: [TEST_INSTANCE_POOL_ID] python_wheel_task: package_name: my_test_code entry_point: run diff --git a/acceptance/bundle/integration_whl/wrapper_custom_params/script b/acceptance/bundle/integration_whl/wrapper_custom_params/script index c92be51fa..ab9c45032 100644 --- a/acceptance/bundle/integration_whl/wrapper_custom_params/script +++ b/acceptance/bundle/integration_whl/wrapper_custom_params/script @@ -1,8 +1,7 @@ -export SPARK_VERSION=12.2.x-scala2.12 -export PYTHON_WHEEL_WRAPPER=true -envsubst < $TESTDIR/../base/input.json.tmpl > input.json -trace cat input.json -$CLI bundle init $TESTDIR/../base --config-file input.json +export DEFAULT_SPARK_VERSION=12.2.x-scala2.12 +export EXTRA_CONFIG=python_wheel_wrapper.yml +envsubst < $TESTDIR/../base/databricks.yml.tmpl > databricks.yml +cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} . trace cat databricks.yml trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT trace $CLI bundle deploy
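--
For reference, a minimal sketch of the substitution flow the updated scripts
share, with illustrative values: in the acceptance tests UNIQUE_NAME,
DEFAULT_SPARK_VERSION, NODE_TYPE_ID, and TEST_INSTANCE_POOL_ID come from the
test harness, and scripts that need it set EXTRA_CONFIG themselves (the
wrapper tests also override DEFAULT_SPARK_VERSION to 12.2.x-scala2.12).

  # Environment assumed by databricks.yml.tmpl; these values are made up.
  export UNIQUE_NAME=wheel-test-1234
  export DEFAULT_SPARK_VERSION=13.3.x-snapshot-scala2.12
  export NODE_TYPE_ID=i3.xlarge
  export TEST_INSTANCE_POOL_ID=pool-0123456789

  # Select the optional config: empty.yml is a no-op placeholder, while
  # python_wheel_wrapper.yml enables experimental.python_wheel_wrapper.
  export EXTRA_CONFIG=empty.yml

  # envsubst (GNU gettext) rewrites $VAR references using the exported
  # values, producing a plain databricks.yml the CLI loads directly. Unset
  # variables become empty strings, so the harness must export everything
  # the template references.
  envsubst < databricks.yml.tmpl > databricks.yml

The rendered databricks.yml then pulls the chosen file in through its
include: section, which is how the wrapper tests enable
experimental.python_wheel_wrapper without maintaining a second template.
Tests that run outside the base directory copy the shared fixtures in first,
e.g. cp -r $TESTDIR/../base/{$EXTRA_CONFIG,setup.py,my_test_code} .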