Run test to confirm type mismatch errors and opportunities

Pieter Noordhuis 2024-02-02 13:23:53 +01:00
parent cb3ad737f1
commit fc18de11d1
2 changed files with 87 additions and 0 deletions

@@ -0,0 +1,41 @@
package jobs_test

import (
	"os"
	"testing"

	"github.com/databricks/cli/libs/dyn/convert"
	"github.com/databricks/cli/libs/dyn/yamlloader"
	"github.com/databricks/cli/libs/flags"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestJobsSubmitInvalidJson(t *testing.T) {
	// Point the JSON flag at the fixture on disk.
	var jsonFlag flags.JsonFlag
	err := jsonFlag.Set("@testdata/job_cluster_test.json")
	require.NoError(t, err)

	// Unmarshal the raw payload into the typed request.
	var submitReq jobs.SubmitRun
	err = jsonFlag.Unmarshal(&submitReq)
	require.NoError(t, err)
}

func TestJobsSubmitInvalidJsonThroughYaml(t *testing.T) {
	path := "testdata/job_cluster_test.json"
	f, err := os.Open(path)
	require.NoError(t, err)
	defer f.Close()

	// JSON is a subset of YAML, so the same fixture loads here too.
	v, err := yamlloader.LoadYAML(path, f)
	require.NoError(t, err)

	// Normalize the dynamic value against the typed request and
	// collect diagnostics for any type mismatches.
	var submitReq jobs.SubmitRun
	nv, diag := convert.Normalize(&submitReq, v)
	assert.Empty(t, diag)
	assert.NotNil(t, nv)
}
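
For context on what the second test exercises: convert.Normalize walks the incoming dynamic value against the Go type and reports type mismatches as diagnostics rather than failing outright. A minimal standalone sketch, not part of this commit; the target struct and its field names are illustrative assumptions:

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
	"github.com/databricks/cli/libs/dyn/convert"
)

func main() {
	// Hypothetical target type: "workers" is declared as an int.
	var dst struct {
		Workers int `json:"workers"`
	}

	// Source value carries a string where the int is expected.
	src := dyn.V(map[string]dyn.Value{
		"workers": dyn.V("two"),
	})

	// Normalize returns the value coerced toward the target type,
	// plus diagnostics for anything that did not fit.
	_, diags := convert.Normalize(&dst, src)
	for _, d := range diags {
		fmt.Println(d.Summary)
	}
}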

@@ -0,0 +1,46 @@
{
  "name": "name",
  "email_notifications": {
    "on_start": [
      "name@domain.test"
    ],
    "on_success": [
      "name@domain.test"
    ],
    "on_failure": [
      "name@domain.test"
    ]
  },
  "tasks": {
    "task_key": "key",
    "max_retries": 0,
    "min_retry_interval_millis": 0,
    "retry_on_timeout": true,
    "timeout_seconds": 3600,
    "email_notifications": {},
    "libraries": [],
    "notebook_task": {
      "notebook_path": "/some/path/to/notebook"
    },
    "new_cluster": {
      "spark_version": "14.3.x-scala2.12",
      "spark_conf": {
        "spark.driver.extraJavaOptions": "-Dlog4j2.formatMsgNoLookups=true",
        "spark.executor.extraJavaOptions": "-Dlog4j2.formatMsgNoLookups=true",
        "spark.shuffle.service.enabled": true
      },
      "aws_attributes": {
        "availability": "SPOT_WITH_FALLBACK",
        "spot_bid_price_percent": 100,
        "ebs_volume_count": 0
      },
      "node_type_id": "i3en.large",
      "driver_node_type_id": "i3en.large",
      "spark_env_vars": {
        "PYSPARK_PYTHON": "/databricks/python3/bin/python3"
      },
      "enable_elastic_disk": false,
      "num_workers": 1
    }
  }
}
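
For reference, this fixture intentionally disagrees with the SDK's types in at least two places: "tasks" is a JSON object where jobs.SubmitRun models a list of tasks, and "spark.shuffle.service.enabled" is a boolean where spark_conf is modeled as string-to-string. A minimal sketch of how plain encoding/json reacts to the first mismatch, assuming the SDK's Tasks field is a slice (as in databricks-sdk-go at the time of writing):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/databricks/databricks-sdk-go/service/jobs"
)

func main() {
	// Same shape as the fixture above: "tasks" is a JSON object,
	// not a list of task objects.
	payload := []byte(`{"tasks": {"task_key": "key"}}`)

	var req jobs.SubmitRun
	err := json.Unmarshal(payload, &req)

	// encoding/json reports this mismatch as a hard error (roughly
	// "cannot unmarshal object into Go struct field ... tasks"),
	// whereas the dyn/convert path in the test above surfaces the
	// same mismatch as diagnostics.
	fmt.Println(err)
}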