diff --git a/cmd/workspace/jobs/jobs_test.go b/cmd/workspace/jobs/jobs_test.go
new file mode 100644
index 000000000..66df531d4
--- /dev/null
+++ b/cmd/workspace/jobs/jobs_test.go
@@ -0,0 +1,41 @@
+package jobs_test
+
+import (
+	"os"
+	"testing"
+
+	"github.com/databricks/cli/libs/dyn/convert"
+	"github.com/databricks/cli/libs/dyn/yamlloader"
+	"github.com/databricks/cli/libs/flags"
+	"github.com/databricks/databricks-sdk-go/service/jobs"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+)
+
+func TestJobsSubmitInvalidJson(t *testing.T) {
+	var jsonFlag flags.JsonFlag
+
+	err := jsonFlag.Set("@testdata/job_cluster_test.json")
+	require.NoError(t, err)
+
+	var submitReq jobs.SubmitRun
+	err = jsonFlag.Unmarshal(&submitReq)
+	require.NoError(t, err)
+}
+
+func TestJobsSubmitInvalidJsonThroughYaml(t *testing.T) {
+	path := "testdata/job_cluster_test.json"
+	f, err := os.Open(path)
+	require.NoError(t, err)
+	defer f.Close()
+
+	v, err := yamlloader.LoadYAML(path, f)
+	require.NoError(t, err)
+
+	var submitReq jobs.SubmitRun
+	nv, diag := convert.Normalize(&submitReq, v)
+
+	assert.Empty(t, diag)
+	assert.NotNil(t, nv)
+
+}
diff --git a/cmd/workspace/jobs/testdata/job_cluster_test.json b/cmd/workspace/jobs/testdata/job_cluster_test.json
new file mode 100644
index 000000000..4516faf3a
--- /dev/null
+++ b/cmd/workspace/jobs/testdata/job_cluster_test.json
@@ -0,0 +1,46 @@
+{
+    "name": "name",
+    "email_notifications": {
+        "on_start": [
+            "name@domain.test"
+        ],
+        "on_success": [
+            "name@domain.test"
+        ],
+        "on_failure": [
+            "name@domain.test"
+        ]
+    },
+    "tasks": {
+        "task_key": "key",
+        "max_retries": 0,
+        "min_retry_interval_millis": 0,
+        "retry_on_timeout": true,
+        "timeout_seconds": 3600,
+        "email_notifications": {},
+        "libraries": [],
+        "notebook_task": {
+            "notebook_path": "/some/path/to/notebook"
+        },
+        "new_cluster": {
+            "spark_version": "14.3.x-scala2.12",
+            "spark_conf": {
+                "spark.driver.extraJavaOptions": "-Dlog4j2.formatMsgNoLookups=true",
+                "spark.executor.extraJavaOptions": "-Dlog4j2.formatMsgNoLookups=true",
+                "spark.shuffle.service.enabled": true
+            },
+            "aws_attributes": {
+                "availability": "SPOT_WITH_FALLBACK",
+                "spot_bid_price_percent": 100,
+                "ebs_volume_count": 0
+            },
+            "node_type_id": "i3en.large",
+            "driver_node_type_id": "i3en.large",
+            "spark_env_vars": {
+                "PYSPARK_PYTHON": "/databricks/python3/bin/python3"
+            },
+            "enable_elastic_disk": false,
+            "num_workers": 1
+        }
+    }
+}