Test existing behavior when loading non-string spark conf values (#1071)

## Changes

This test is expected to fail when we enable the custom YAML loader.
This commit is contained in:
Pieter Noordhuis 2023-12-18 12:22:22 +01:00 committed by GitHub
parent b17e845d44
commit cee70a53c8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 47 additions and 0 deletions

View File

@ -0,0 +1,25 @@
resources:
  jobs:
    job_with_spark_conf:
      name: Test job
      max_concurrent_runs: 1

      job_clusters:
        - job_cluster_key: test_cluster
          new_cluster:
            spark_version: 14.2.x-scala2.12
            node_type_id: i3.xlarge
            num_workers: 2
            spark_conf:
              # Test behavior if non-string values are specified.
              # Only spark.string is a YAML string; the rest parse as
              # int, bool, and float scalars respectively.
              spark.string: string
              spark.int: 1
              spark.bool: true
              spark.float: 1.2

      tasks:
        - task_key: test_task
          job_cluster_key: test_cluster
          spark_python_task:
            python_file: test.py

View File

@ -0,0 +1,22 @@
package config_tests
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestJobWithSparkConf pins down the current behavior when a job cluster's
// spark_conf map contains non-string scalar values (int, bool, float):
// the job's cluster definition still loads, but its tasks do not.
// This is expected to change once the custom YAML loader is enabled.
func TestJobWithSparkConf(t *testing.T) {
	bundle := loadTarget(t, "./job_with_spark_conf", "default")
	assert.Len(t, bundle.Config.Resources.Jobs, 1)

	job := bundle.Config.Resources.Jobs["job_with_spark_conf"]
	assert.Len(t, job.JobClusters, 1)
	assert.Equal(t, "test_cluster", job.JobClusters[0].JobClusterKey)

	// With the current loader, the non-string spark_conf values cause the
	// task list to come back empty rather than producing a load error.
	assert.Empty(t, job.Tasks, "see https://github.com/databricks/cli/issues/992")
}