Enable Spark JAR task test (#1658)

## Changes
Re-enables `TestAccSparkJarTaskDeployAndRunOnVolumes`, which had been temporarily skipped until auth / permission issues for UC volumes were resolved. The test template now also takes an `instance_pool_id` parameter, populated from the `TEST_INSTANCE_POOL_ID` environment variable, so the test's job cluster can be created from an existing instance pool.
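
For background, a minimal, self-contained sketch of how the new parameter is resolved; it mirrors the `env.Get(ctx, "TEST_INSTANCE_POOL_ID")` call added in the test diff below. The `main` scaffolding and the print are illustrative only, and the description of `env.Get`'s fallback behavior is an assumption about `libs/env`, not something stated in this commit:

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/cli/libs/env"
)

func main() {
	ctx := context.Background()

	// env.Get performs a context-aware environment lookup (assumed here to
	// fall back to the process environment) and returns "" when the
	// variable is unset.
	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")

	// In the test, this value is passed through the "spark_jar_task"
	// template inputs and rendered as the job cluster's instance_pool_id;
	// here we only print it.
	fmt.Printf("instance_pool_id: %q\n", instancePoolId)
}
```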

## Tests
```

Updating deployment state...
Deleting files...
Destroy complete!
--- PASS: TestAccSparkJarTaskDeployAndRunOnVolumes (194.13s)
PASS
coverage: 51.9% of statements in ./...
ok      github.com/databricks/cli/internal/bundle       194.586s        coverage: 51.9% of statements in ./...
```
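
To reproduce locally, an invocation along these lines should work against a configured workspace (the cloud name and metastore/pool IDs are placeholders; the test skips itself when `CLOUD_ENV` is missing or `TEST_METASTORE_ID` is empty):

```
CLOUD_ENV=<cloud> TEST_METASTORE_ID=<metastore-id> TEST_INSTANCE_POOL_ID=<pool-id> \
  go test ./internal/bundle -run TestAccSparkJarTaskDeployAndRunOnVolumes -v
```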
Author: Andrew Nester (committed by GitHub), 2024-08-06 20:58:34 +02:00
Parent: f3ffded3bf
Commit: 9d1fbbb39c
3 changed files with 16 additions and 10 deletions

Test template schema (JSON):

```diff
@@ -24,6 +24,10 @@
         "artifact_path": {
             "type": "string",
             "description": "Path to the remote base path for artifacts"
+        },
+        "instance_pool_id": {
+            "type": "string",
+            "description": "Instance pool id for job cluster"
         }
     }
 }
```

Test bundle configuration template (YAML):

```diff
@@ -22,6 +22,7 @@ resources:
           num_workers: 1
           spark_version: "{{.spark_version}}"
           node_type_id: "{{.node_type_id}}"
+          instance_pool_id: "{{.instance_pool_id}}"
         spark_jar_task:
           main_class_name: PrintArgs
         libraries:
```

Integration test (Go):

```diff
@@ -6,15 +6,14 @@ import (
 
 	"github.com/databricks/cli/internal"
 	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/libs/env"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"
 )
 
 func runSparkJarTest(t *testing.T, sparkVersion string) {
-	t.Skip("Temporarily skipping the test until auth / permission issues for UC volumes are resolved.")
-
-	env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
-	t.Log(env)
+	cloudEnv := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
+	t.Log(cloudEnv)
 
 	if os.Getenv("TEST_METASTORE_ID") == "" {
 		t.Skip("Skipping tests that require a UC Volume when metastore id is not set.")
@@ -24,14 +23,16 @@ func runSparkJarTest(t *testing.T, sparkVersion string) {
 	w := wt.W
 	volumePath := internal.TemporaryUcVolume(t, w)
 
-	nodeTypeId := internal.GetNodeTypeId(env)
+	nodeTypeId := internal.GetNodeTypeId(cloudEnv)
 	tmpDir := t.TempDir()
+	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
 	bundleRoot, err := initTestTemplateWithBundleRoot(t, ctx, "spark_jar_task", map[string]any{
-		"node_type_id":  nodeTypeId,
-		"unique_id":     uuid.New().String(),
-		"spark_version": sparkVersion,
-		"root":          tmpDir,
-		"artifact_path": volumePath,
+		"node_type_id":     nodeTypeId,
+		"unique_id":        uuid.New().String(),
+		"spark_version":    sparkVersion,
+		"root":             tmpDir,
+		"artifact_path":    volumePath,
+		"instance_pool_id": instancePoolId,
 	}, tmpDir)
 	require.NoError(t, err)
 
```