diff --git a/bundle/tests/job_with_spark_conf/databricks.yml b/bundle/tests/job_with_spark_conf/databricks.yml
new file mode 100644
index 00000000..9b568cf9
--- /dev/null
+++ b/bundle/tests/job_with_spark_conf/databricks.yml
@@ -0,0 +1,25 @@
+resources:
+  jobs:
+    job_with_spark_conf:
+      name: Test job
+      max_concurrent_runs: 1
+
+      job_clusters:
+        - job_cluster_key: test_cluster
+          new_cluster:
+            spark_version: 14.2.x-scala2.12
+            node_type_id: i3.xlarge
+            num_workers: 2
+            spark_conf:
+
+              # Test behavior if non-string values are specified.
+              spark.string: string
+              spark.int: 1
+              spark.bool: true
+              spark.float: 1.2
+
+      tasks:
+        - task_key: test_task
+          job_cluster_key: test_cluster
+          spark_python_task:
+            python_file: test.py
diff --git a/bundle/tests/job_with_spark_conf_test.go b/bundle/tests/job_with_spark_conf_test.go
new file mode 100644
index 00000000..a2c04c5e
--- /dev/null
+++ b/bundle/tests/job_with_spark_conf_test.go
@@ -0,0 +1,22 @@
+package config_tests
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestJobWithSparkConf(t *testing.T) {
+	b := loadTarget(t, "./job_with_spark_conf", "default")
+	assert.Len(t, b.Config.Resources.Jobs, 1)
+
+	job := b.Config.Resources.Jobs["job_with_spark_conf"]
+	assert.Len(t, job.JobClusters, 1)
+	assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey)
+
+	// Existing behavior is such that including non-string values
+	// in the spark_conf map will cause the job to fail to load.
+	// This is expected to be solved once we switch to the custom YAML loader.
+	tasks := job.Tasks
+	assert.Len(t, tasks, 0, "see https://github.com/databricks/cli/issues/992")
+}
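
Note (commentary, not part of the patch): the failure this test pins down stems from the
typed SDK model, where the job cluster's spark_conf is a map[string]string, so the YAML
scalars 1, true, and 1.2 have to be normalized to their string forms before they can fit
that field. The sketch below is purely illustrative and is not the CLI's actual loader;
it assumes gopkg.in/yaml.v3 and uses fmt.Sprintf-based stringification to show the kind
of normalization the custom YAML loader referenced in the test comment could perform.

    package main

    import (
    	"fmt"

    	"gopkg.in/yaml.v3"
    )

    func main() {
    	src := []byte(`
    spark.string: string
    spark.int: 1
    spark.bool: true
    spark.float: 1.2
    `)
    	// Decode into an untyped map first; yaml.v3 resolves the scalars
    	// to string, int, bool, and float64 respectively.
    	var raw map[string]any
    	if err := yaml.Unmarshal(src, &raw); err != nil {
    		panic(err)
    	}
    	// Normalize every value to its string form, matching the
    	// map[string]string shape of the typed spark_conf field.
    	conf := make(map[string]string, len(raw))
    	for k, v := range raw {
    		conf[k] = fmt.Sprintf("%v", v) // 1 -> "1", true -> "true", 1.2 -> "1.2"
    	}
    	fmt.Println(conf)
    }

Until such normalization lands, an author-side workaround is to quote the values in the
bundle YAML (e.g. spark.int: "1"), which makes them plain YAML strings that the typed
map accepts as-is.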