mirror of https://github.com/databricks/cli.git
Test existing behavior when loading non-string spark conf values (#1071)
## Changes

This test is expected to fail when we enable the custom YAML loader.
This commit is contained in:
parent b17e845d44
commit cee70a53c8
@@ -0,0 +1,25 @@
resources:
  jobs:
    job_with_spark_conf:
      name: Test job
      max_concurrent_runs: 1

      job_clusters:
        - job_cluster_key: test_cluster
          new_cluster:
            spark_version: 14.2.x-scala2.12
            node_type_id: i3.xlarge
            num_workers: 2
            spark_conf:

              # Test behavior if non-string values are specified.
              spark.string: string
              spark.int: 1
              spark.bool: true
              spark.float: 1.2

      tasks:
        - task_key: test_task
          job_cluster_key: test_cluster
          spark_python_task:
            python_file: test.py
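The spark_conf block above deliberately mixes string and non-string scalars. As a rough, self-contained sketch of the failure mode (using gopkg.in/yaml.v3 and a hand-written struct purely for illustration, not the CLI's actual loading path), decoding such a snippet into a map[string]string field errors out instead of coercing the values:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// newCluster mirrors only the relevant shape of the generated jobs struct:
// spark_conf is typed as a map of string to string.
type newCluster struct {
	SparkConf map[string]string `yaml:"spark_conf"`
}

func main() {
	src := []byte(`spark_conf:
  spark.string: string
  spark.int: 1
  spark.bool: true
  spark.float: 1.2
`)

	var c newCluster
	// The non-string scalars (1, true, 1.2) cannot be unmarshalled into
	// string values, so decoding fails instead of silently coercing them.
	if err := yaml.Unmarshal(src, &c); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Printf("spark_conf: %#v\n", c.SparkConf)
}
```

With yaml.v3, the single returned error lists each non-string entry it could not place into a string; only spark.string would decode cleanly.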
@@ -0,0 +1,22 @@
package config_tests

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestJobWithSparkConf(t *testing.T) {
	b := loadTarget(t, "./job_with_spark_conf", "default")
	assert.Len(t, b.Config.Resources.Jobs, 1)

	job := b.Config.Resources.Jobs["job_with_spark_conf"]
	assert.Len(t, job.JobClusters, 1)
	assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey)

	// Existing behavior is such that including non-string values
	// in the spark_conf map will cause the job to fail to load.
	// This is expected to be solved once we switch to the custom YAML loader.
	tasks := job.Tasks
	assert.Len(t, tasks, 0, "see https://github.com/databricks/cli/issues/992")
}
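The custom YAML loader referenced in the comments is not part of this commit; issue #992 tracks the underlying problem. Purely as a hedged sketch of one possible approach (assuming a gopkg.in/yaml.v3 node walk, which may well differ from whatever the CLI ultimately ships), a loader could re-tag every scalar node as a string before decoding, so that 1, true, and 1.2 arrive as "1", "true", and "1.2":

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

// normalizeScalars re-tags every scalar node as a string so that values such
// as 1, true, and 1.2 decode into string-typed fields. This illustrates the
// general idea only; it is not the CLI's implementation.
func normalizeScalars(n *yaml.Node) {
	if n.Kind == yaml.ScalarNode {
		n.Tag = "!!str"
	}
	for _, child := range n.Content {
		normalizeScalars(child)
	}
}

func main() {
	src := []byte(`spark_conf:
  spark.int: 1
  spark.bool: true
  spark.float: 1.2
`)

	var doc yaml.Node
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	normalizeScalars(&doc)

	var out struct {
		SparkConf map[string]string `yaml:"spark_conf"`
	}
	if err := doc.Decode(&out); err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", out.SparkConf)
	// Prints: map[string]string{"spark.bool":"true", "spark.float":"1.2", "spark.int":"1"}
}
```

If the loader handles these values along such lines, decoding succeeds, job.Tasks is populated, and the assertion above starts failing, which is exactly why the commit description flags this test as expected to break once the custom YAML loader is enabled.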