From 8b51eeb57a66999d1c1abfbe01700882d2d44015 Mon Sep 17 00:00:00 2001
From: Denis Bilenko
Date: Wed, 12 Mar 2025 11:51:31 +0100
Subject: [PATCH] Convert python_wheel_test.go to acceptance test (#2471)

Convert integration/bundle/python_wheel_test.go to acceptance tests.
I plan to expand these tests to check patchwheel functionality.

Each test previously contained two runs - with params and without. I've split
each run into a separate test to reduce total time, since these runs can be
done in parallel.

Also add a new env var DEFAULT_SPARK_VERSION that matches the one in
integration tests.

The tests are currently enabled on every PR (`CloudSlow = true` is commented
out); this can be changed after landing.
---
 acceptance/acceptance_test.go                 |  3 +
 .../base}/databricks_template_schema.json     |  0
 .../integration_whl/base/input.json.tmpl      |  8 +++
 .../bundle/integration_whl/base/output.txt    | 35 ++++++++++
 acceptance/bundle/integration_whl/base/script |  8 +++
 .../base}/template/databricks.yml.tmpl        |  0
 .../base}/template/setup.py.tmpl              |  0
 .../template/{{.project_name}}/__init__.py    |  0
 .../template/{{.project_name}}/__main__.py    |  0
 .../integration_whl/custom_params/output.txt  | 35 ++++++++++
 .../integration_whl/custom_params/script      |  8 +++
 .../databricks_template_schema.json           |  0
 .../interactive_cluster/output.txt            | 35 ++++++++++
 .../interactive_cluster/script                |  8 +++
 .../template/databricks.yml.tmpl              |  0
 .../template/setup.py.tmpl                    |  0
 .../template/{{.project_name}}/__init__.py    |  0
 .../template/{{.project_name}}/__main__.py    |  0
 acceptance/bundle/integration_whl/test.toml   | 20 ++++++
 .../bundle/integration_whl/wrapper/output.txt | 35 ++++++++++
 .../bundle/integration_whl/wrapper/script     | 12 ++++
 .../wrapper_custom_params/output.txt          | 35 ++++++++++
 .../wrapper_custom_params/script              |  8 +++
 integration/bundle/python_wheel_test.go       | 64 -------------------
 24 files changed, 250 insertions(+), 64 deletions(-)
 rename {integration/bundle/bundles/python_wheel_task => acceptance/bundle/integration_whl/base}/databricks_template_schema.json (100%)
 create mode 100644 acceptance/bundle/integration_whl/base/input.json.tmpl
 create mode 100644 acceptance/bundle/integration_whl/base/output.txt
 create mode 100644 acceptance/bundle/integration_whl/base/script
 rename {integration/bundle/bundles/python_wheel_task => acceptance/bundle/integration_whl/base}/template/databricks.yml.tmpl (100%)
 rename {integration/bundle/bundles/python_wheel_task => acceptance/bundle/integration_whl/base}/template/setup.py.tmpl (100%)
 rename {integration/bundle/bundles/python_wheel_task => acceptance/bundle/integration_whl/base}/template/{{.project_name}}/__init__.py (100%)
 rename {integration/bundle/bundles/python_wheel_task => acceptance/bundle/integration_whl/base}/template/{{.project_name}}/__main__.py (100%)
 create mode 100644 acceptance/bundle/integration_whl/custom_params/output.txt
 create mode 100644 acceptance/bundle/integration_whl/custom_params/script
 rename {integration/bundle/bundles/python_wheel_task_with_cluster => acceptance/bundle/integration_whl/interactive_cluster}/databricks_template_schema.json (100%)
 create mode 100644 acceptance/bundle/integration_whl/interactive_cluster/output.txt
 create mode 100644 acceptance/bundle/integration_whl/interactive_cluster/script
 rename {integration/bundle/bundles/python_wheel_task_with_cluster => acceptance/bundle/integration_whl/interactive_cluster}/template/databricks.yml.tmpl (100%)
 rename {integration/bundle/bundles/python_wheel_task_with_cluster => acceptance/bundle/integration_whl/interactive_cluster}/template/setup.py.tmpl (100%)
 rename {integration/bundle/bundles/python_wheel_task_with_cluster => acceptance/bundle/integration_whl/interactive_cluster}/template/{{.project_name}}/__init__.py (100%)
 rename {integration/bundle/bundles/python_wheel_task_with_cluster => acceptance/bundle/integration_whl/interactive_cluster}/template/{{.project_name}}/__main__.py (100%)
 create mode 100644 acceptance/bundle/integration_whl/test.toml
 create mode 100644 acceptance/bundle/integration_whl/wrapper/output.txt
 create mode 100644 acceptance/bundle/integration_whl/wrapper/script
 create mode 100644 acceptance/bundle/integration_whl/wrapper_custom_params/output.txt
 create mode 100644 acceptance/bundle/integration_whl/wrapper_custom_params/script
 delete mode 100644 integration/bundle/python_wheel_test.go

diff --git a/acceptance/acceptance_test.go b/acceptance/acceptance_test.go
index 988f5fcfd..4a38e6f13 100644
--- a/acceptance/acceptance_test.go
+++ b/acceptance/acceptance_test.go
@@ -170,6 +170,9 @@ func testAccept(t *testing.T, InprocessMode bool, singleTest string) int {
 	repls.Repls = append(repls.Repls, testdiff.Replacement{Old: regexp.MustCompile("dbapi[0-9a-f]+"), New: "[DATABRICKS_TOKEN]"})
 
+	// Matches defaultSparkVersion in ../integration/bundle/helpers_test.go
+	t.Setenv("DEFAULT_SPARK_VERSION", "13.3.x-snapshot-scala2.12")
+
 	testDirs := getTests(t)
 	require.NotEmpty(t, testDirs)
 
diff --git a/integration/bundle/bundles/python_wheel_task/databricks_template_schema.json b/acceptance/bundle/integration_whl/base/databricks_template_schema.json
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task/databricks_template_schema.json
rename to acceptance/bundle/integration_whl/base/databricks_template_schema.json
diff --git a/acceptance/bundle/integration_whl/base/input.json.tmpl b/acceptance/bundle/integration_whl/base/input.json.tmpl
new file mode 100644
index 000000000..8d4255be2
--- /dev/null
+++ b/acceptance/bundle/integration_whl/base/input.json.tmpl
@@ -0,0 +1,8 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "$SPARK_VERSION",
+  "node_type_id": "$NODE_TYPE_ID",
+  "unique_id": "$UNIQUE_NAME",
+  "python_wheel_wrapper": $PYTHON_WHEEL_WRAPPER,
+  "instance_pool_id": "$TEST_INSTANCE_POOL_ID"
+}
diff --git a/acceptance/bundle/integration_whl/base/output.txt b/acceptance/bundle/integration_whl/base/output.txt
new file mode 100644
index 000000000..a6aadac83
--- /dev/null
+++ b/acceptance/bundle/integration_whl/base/output.txt
@@ -0,0 +1,35 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "13.3.x-snapshot-scala2.12",
+  "node_type_id": "",
+  "unique_id": "[UNIQUE_NAME]",
+  "python_wheel_wrapper": false,
+  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
+}
+✨ Successfully initialized template
+
+>>> [CLI] bundle deploy
+Building python_artifact...
+Uploading my_test_code-0.0.1-py3-none-any.whl...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> [CLI] bundle run some_other_job
+Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[NUMID]/run/[NUMID]
+
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
+Hello from my func
+Got arguments:
+['my_test_code', 'one', 'two']
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job some_other_job
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
diff --git a/acceptance/bundle/integration_whl/base/script b/acceptance/bundle/integration_whl/base/script
new file mode 100644
index 000000000..06c3bffdc
--- /dev/null
+++ b/acceptance/bundle/integration_whl/base/script
@@ -0,0 +1,8 @@
+export SPARK_VERSION=$DEFAULT_SPARK_VERSION
+export PYTHON_WHEEL_WRAPPER=false
+envsubst < input.json.tmpl > input.json
+cat input.json
+$CLI bundle init . --config-file input.json
+trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
+trace $CLI bundle deploy
+trace $CLI bundle run some_other_job
diff --git a/integration/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/acceptance/bundle/integration_whl/base/template/databricks.yml.tmpl
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl
rename to acceptance/bundle/integration_whl/base/template/databricks.yml.tmpl
diff --git a/integration/bundle/bundles/python_wheel_task/template/setup.py.tmpl b/acceptance/bundle/integration_whl/base/template/setup.py.tmpl
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task/template/setup.py.tmpl
rename to acceptance/bundle/integration_whl/base/template/setup.py.tmpl
diff --git a/integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py b/acceptance/bundle/integration_whl/base/template/{{.project_name}}/__init__.py
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py
rename to acceptance/bundle/integration_whl/base/template/{{.project_name}}/__init__.py
diff --git a/integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py b/acceptance/bundle/integration_whl/base/template/{{.project_name}}/__main__.py
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py
rename to acceptance/bundle/integration_whl/base/template/{{.project_name}}/__main__.py
diff --git a/acceptance/bundle/integration_whl/custom_params/output.txt b/acceptance/bundle/integration_whl/custom_params/output.txt
new file mode 100644
index 000000000..f4715eab7
--- /dev/null
+++ b/acceptance/bundle/integration_whl/custom_params/output.txt
@@ -0,0 +1,35 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "13.3.x-snapshot-scala2.12",
+  "node_type_id": "",
+  "unique_id": "[UNIQUE_NAME]",
+  "python_wheel_wrapper": false,
+  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
+}
+✨ Successfully initialized template
+
+>>> [CLI] bundle deploy
+Building python_artifact...
+Uploading my_test_code-0.0.1-py3-none-any.whl...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> [CLI] bundle run some_other_job --python-params param1,param2
+Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[NUMID]/run/[NUMID]
+
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
+Hello from my func
+Got arguments:
+['my_test_code', 'param1', 'param2']
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job some_other_job
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
diff --git a/acceptance/bundle/integration_whl/custom_params/script b/acceptance/bundle/integration_whl/custom_params/script
new file mode 100644
index 000000000..3abb7aafa
--- /dev/null
+++ b/acceptance/bundle/integration_whl/custom_params/script
@@ -0,0 +1,8 @@
+export SPARK_VERSION=$DEFAULT_SPARK_VERSION
+export PYTHON_WHEEL_WRAPPER=false
+envsubst < $TESTDIR/../base/input.json.tmpl > input.json
+cat input.json
+$CLI bundle init $TESTDIR/../base --config-file input.json
+trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
+trace $CLI bundle deploy
+trace $CLI bundle run some_other_job --python-params param1,param2
diff --git a/integration/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json b/acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json
rename to acceptance/bundle/integration_whl/interactive_cluster/databricks_template_schema.json
diff --git a/acceptance/bundle/integration_whl/interactive_cluster/output.txt b/acceptance/bundle/integration_whl/interactive_cluster/output.txt
new file mode 100644
index 000000000..a6aadac83
--- /dev/null
+++ b/acceptance/bundle/integration_whl/interactive_cluster/output.txt
@@ -0,0 +1,35 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "13.3.x-snapshot-scala2.12",
+  "node_type_id": "",
+  "unique_id": "[UNIQUE_NAME]",
+  "python_wheel_wrapper": false,
+  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
+}
+✨ Successfully initialized template
+
+>>> [CLI] bundle deploy
+Building python_artifact...
+Uploading my_test_code-0.0.1-py3-none-any.whl...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> [CLI] bundle run some_other_job
+Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[NUMID]/run/[NUMID]
+
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
+Hello from my func
+Got arguments:
+['my_test_code', 'one', 'two']
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job some_other_job
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
diff --git a/acceptance/bundle/integration_whl/interactive_cluster/script b/acceptance/bundle/integration_whl/interactive_cluster/script
new file mode 100644
index 000000000..7e10d6299
--- /dev/null
+++ b/acceptance/bundle/integration_whl/interactive_cluster/script
@@ -0,0 +1,8 @@
+export SPARK_VERSION=$DEFAULT_SPARK_VERSION
+export PYTHON_WHEEL_WRAPPER=false
+envsubst < $TESTDIR/../base/input.json.tmpl > input.json
+cat input.json
+$CLI bundle init $TESTDIR/../base --config-file input.json
+trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
+trace $CLI bundle deploy
+trace $CLI bundle run some_other_job
diff --git a/integration/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl b/acceptance/bundle/integration_whl/interactive_cluster/template/databricks.yml.tmpl
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl
rename to acceptance/bundle/integration_whl/interactive_cluster/template/databricks.yml.tmpl
diff --git a/integration/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl b/acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl
rename to acceptance/bundle/integration_whl/interactive_cluster/template/setup.py.tmpl
diff --git a/integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py b/acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__init__.py
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py
rename to acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__init__.py
diff --git a/integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py b/acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__main__.py
similarity index 100%
rename from integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py
rename to acceptance/bundle/integration_whl/interactive_cluster/template/{{.project_name}}/__main__.py
diff --git a/acceptance/bundle/integration_whl/test.toml b/acceptance/bundle/integration_whl/test.toml
new file mode 100644
index 000000000..2e53f7b53
--- /dev/null
+++ b/acceptance/bundle/integration_whl/test.toml
@@ -0,0 +1,20 @@
+Local = false
+#CloudSlow = true
+Ignore = [
+  ".databricks",
+  "build",
+  "dist",
+  "my_test_code",
+  "my_test_code.egg-info",
+  "setup.py",
+  "input.json",
+  "databricks.yml",
+]
+
+[[Repls]]
+Old = '2\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d'
+New = "[TIMESTAMP]"
+
+[[Repls]]
+Old = '\d{5,}'
+New = "[NUMID]"
diff --git a/acceptance/bundle/integration_whl/wrapper/output.txt b/acceptance/bundle/integration_whl/wrapper/output.txt
new file mode 100644
index 000000000..ca1819f3c
--- /dev/null
+++ b/acceptance/bundle/integration_whl/wrapper/output.txt
@@ -0,0 +1,35 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "12.2.x-scala2.12",
+  "node_type_id": "",
+  "unique_id": "[UNIQUE_NAME]",
+  "python_wheel_wrapper": true,
+  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
+}
+✨ Successfully initialized template
+
+>>> [CLI] bundle deploy
+Building python_artifact...
+Uploading my_test_code-0.0.1-py3-none-any.whl...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> [CLI] bundle run some_other_job
+Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[NUMID]/run/[NUMID]
+
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
+Hello from my func
+Got arguments:
+['my_test_code', 'one', 'two']
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job some_other_job
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
diff --git a/acceptance/bundle/integration_whl/wrapper/script b/acceptance/bundle/integration_whl/wrapper/script
new file mode 100644
index 000000000..ee57bc783
--- /dev/null
+++ b/acceptance/bundle/integration_whl/wrapper/script
@@ -0,0 +1,12 @@
+# Installing wheels from the Workspace file system is only supported on DBR 13.1+.
+# Before that, users ran python wheel tasks on older DBRs and installed the wheel from DBFS.
+# To keep supporting older DBRs, we added the trampoline workaround (https://github.com/databricks/cli/pull/635).
+# This test verifies that python wheel tasks in DABs still work on older DBRs.
+export SPARK_VERSION=12.2.x-scala2.12
+export PYTHON_WHEEL_WRAPPER=true
+envsubst < $TESTDIR/../base/input.json.tmpl > input.json
+cat input.json
+$CLI bundle init $TESTDIR/../base --config-file input.json
+trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
+trace $CLI bundle deploy
+trace $CLI bundle run some_other_job
diff --git a/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt b/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt
new file mode 100644
index 000000000..b5f97d5bc
--- /dev/null
+++ b/acceptance/bundle/integration_whl/wrapper_custom_params/output.txt
@@ -0,0 +1,35 @@
+{
+  "project_name": "my_test_code",
+  "spark_version": "12.2.x-scala2.12",
+  "node_type_id": "",
+  "unique_id": "[UNIQUE_NAME]",
+  "python_wheel_wrapper": true,
+  "instance_pool_id": "[TEST_INSTANCE_POOL_ID]"
+}
+✨ Successfully initialized template
+
+>>> [CLI] bundle deploy
+Building python_artifact...
+Uploading my_test_code-0.0.1-py3-none-any.whl...
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> [CLI] bundle run some_other_job --python-params param1,param2
+Run URL: [DATABRICKS_URL]/?o=[NUMID]#job/[NUMID]/run/[NUMID]
+
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" RUNNING
+[TIMESTAMP] "[default] Test Wheel Job [UNIQUE_NAME]" TERMINATED SUCCESS
+Hello from my func
+Got arguments:
+['my_test_code', 'param1', 'param2']
+
+>>> [CLI] bundle destroy --auto-approve
+The following resources will be deleted:
+  delete job some_other_job
+
+All files and directories at the following location will be deleted: /Workspace/Users/[USERNAME]/.bundle/[UNIQUE_NAME]
+
+Deleting files...
+Destroy complete!
diff --git a/acceptance/bundle/integration_whl/wrapper_custom_params/script b/acceptance/bundle/integration_whl/wrapper_custom_params/script
new file mode 100644
index 000000000..c92f7162d
--- /dev/null
+++ b/acceptance/bundle/integration_whl/wrapper_custom_params/script
@@ -0,0 +1,8 @@
+export SPARK_VERSION=12.2.x-scala2.12
+export PYTHON_WHEEL_WRAPPER=true
+envsubst < $TESTDIR/../base/input.json.tmpl > input.json
+cat input.json
+$CLI bundle init $TESTDIR/../base --config-file input.json
+trap "errcode trace '$CLI' bundle destroy --auto-approve" EXIT
+trace $CLI bundle deploy
+trace $CLI bundle run some_other_job --python-params param1,param2
diff --git a/integration/bundle/python_wheel_test.go b/integration/bundle/python_wheel_test.go
deleted file mode 100644
index 62846f7b5..000000000
--- a/integration/bundle/python_wheel_test.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package bundle_test
-
-import (
-	"testing"
-
-	"github.com/databricks/cli/integration/internal/acc"
-	"github.com/databricks/cli/internal/testutil"
-	"github.com/databricks/cli/libs/env"
-	"github.com/google/uuid"
-	"github.com/stretchr/testify/require"
-)
-
-func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonWheelWrapper bool) {
-	ctx, _ := acc.WorkspaceTest(t)
-
-	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
-	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
-	bundleRoot := initTestTemplate(t, ctx, templateName, map[string]any{
-		"node_type_id":         nodeTypeId,
-		"unique_id":            uuid.New().String(),
-		"spark_version":        sparkVersion,
-		"python_wheel_wrapper": pythonWheelWrapper,
-		"instance_pool_id":     instancePoolId,
-	})
-
-	deployBundle(t, ctx, bundleRoot)
-
-	t.Cleanup(func() {
-		destroyBundle(t, ctx, bundleRoot)
-	})
-
-	if testing.Short() {
-		t.Log("Skip the job run in short mode")
-		return
-	}
-
-	out, err := runResource(t, ctx, bundleRoot, "some_other_job")
-	require.NoError(t, err)
-	require.Contains(t, out, "Hello from my func")
-	require.Contains(t, out, "Got arguments:")
-	require.Contains(t, out, "['my_test_code', 'one', 'two']")
-
-	out, err = runResourceWithParams(t, ctx, bundleRoot, "some_other_job", "--python-params=param1,param2")
-	require.NoError(t, err)
-	require.Contains(t, out, "Hello from my func")
-	require.Contains(t, out, "Got arguments:")
-	require.Contains(t, out, "['my_test_code', 'param1', 'param2']")
-}
-
-func TestPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) {
-	runPythonWheelTest(t, "python_wheel_task", "13.3.x-snapshot-scala2.12", false)
-}
-
-func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) {
-	runPythonWheelTest(t, "python_wheel_task", "12.2.x-scala2.12", true)
-}
-
-func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) {
-	if testutil.GetCloud(t) == testutil.AWS {
-		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
-	}
-
-	runPythonWheelTest(t, "python_wheel_task_with_cluster", defaultSparkVersion, false)
-}