package bundle

import (
	"context"
	"testing"

	"github.com/databricks/cli/internal"
	"github.com/databricks/cli/internal/acc"
	"github.com/databricks/cli/internal/testutil"
	"github.com/databricks/cli/libs/env"
	"github.com/google/uuid"
	"github.com/stretchr/testify/require"
)
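
// runSparkJarTestCommon initializes the spark_jar_task bundle template with the
// given Spark version and artifact path, deploys the bundle, runs its "jar_job"
// resource, and verifies the job prints "Hello from Jar!". The deployed bundle
// is destroyed via t.Cleanup when the test finishes.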
func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion string, artifactPath string) {
	cloudEnv := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
	nodeTypeId := internal.GetNodeTypeId(cloudEnv)
	tmpDir := t.TempDir()
	instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID")
	bundleRoot, err := initTestTemplateWithBundleRoot(t, ctx, "spark_jar_task", map[string]any{
		"node_type_id":     nodeTypeId,
		"unique_id":        uuid.New().String(),
		"spark_version":    sparkVersion,
		"root":             tmpDir,
		"artifact_path":    artifactPath,
		"instance_pool_id": instancePoolId,
	}, tmpDir)
	require.NoError(t, err)

	err = deployBundle(t, ctx, bundleRoot)
	require.NoError(t, err)

	t.Cleanup(func() {
		err := destroyBundle(t, ctx, bundleRoot)
		require.NoError(t, err)
	})

	out, err := runResource(t, ctx, bundleRoot, "jar_job")
	require.NoError(t, err)
	require.Contains(t, out, "Hello from Jar!")
}
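
// runSparkJarTestFromVolume runs the shared Spark JAR test against a Unity
// Catalog workspace, with the built artifact uploaded to a temporary UC volume.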
func runSparkJarTestFromVolume(t *testing.T, sparkVersion string) {
	ctx, wt := acc.UcWorkspaceTest(t)
	volumePath := internal.TemporaryUcVolume(t, wt.W)
	ctx = env.Set(ctx, "DATABRICKS_BUNDLE_TARGET", "volume")
	runSparkJarTestCommon(t, ctx, sparkVersion, volumePath)
}
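
// runSparkJarTestFromWorkspace runs the shared Spark JAR test with the artifact
// uploaded to the workspace file system. It passes a placeholder ("n/a") for
// the artifact path, which the "workspace" bundle target presumably ignores.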
func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) {
	ctx, _ := acc.WorkspaceTest(t)
	ctx = env.Set(ctx, "DATABRICKS_BUNDLE_TARGET", "workspace")
	runSparkJarTestCommon(t, ctx, sparkVersion, "n/a")
}
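
// TestAccSparkJarTaskDeployAndRunOnVolumes deploys and runs a Spark JAR task
// whose artifact is served from a UC volume, once per supported DBR version.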
func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) {
	internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
	testutil.RequireJDK(t, context.Background(), "1.8.0")

	// Failure on earlier DBR versions:
	//
	// JAR installation from Volumes is supported on UC Clusters with DBR >= 13.3.
	// Denied library is Jar(/Volumes/main/test-schema-ldgaklhcahlg/my-volume/.internal/PrintArgs.jar)
	//

	versions := []string{
		"13.3.x-scala2.12", // 13.3 LTS (includes Apache Spark 3.4.1, Scala 2.12)
		"14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
		"15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12)
	}

	for _, version := range versions {
		t.Run(version, func(t *testing.T) {
			t.Parallel()
			runSparkJarTestFromVolume(t, version)
		})
	}
}
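
// TestAccSparkJarTaskDeployAndRunOnWorkspace deploys and runs a Spark JAR task
// whose artifact is served from the workspace file system, once per supported
// DBR version.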
func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) {
	internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
	testutil.RequireJDK(t, context.Background(), "1.8.0")

	// Failure on earlier DBR versions:
	//
	// Library from /Workspace is not allowed on this cluster.
	// Please switch to using DBR 14.1+ No Isolation Shared or DBR 13.1+ Shared cluster or 13.2+ Assigned cluster to use /Workspace libraries.
	//

	versions := []string{
		"14.3.x-scala2.12", // 14.3 LTS (includes Apache Spark 3.5.0, Scala 2.12)
		"15.4.x-scala2.12", // 15.4 LTS Beta (includes Apache Spark 3.5.0, Scala 2.12)
	}

	for _, version := range versions {
		t.Run(version, func(t *testing.T) {
			t.Parallel()
			runSparkJarTestFromWorkspace(t, version)
		})
	}
}