Add test for overriding list variable (#2099)

- Add an override for "libraries".
- Remove complexvar; it serves no purpose, since we already have the map
  variable "cluster".
Denis Bilenko 2025-01-09 10:03:03 +01:00 committed by GitHub
parent b0706ccdc1
commit b808d4d6f1
5 changed files with 33 additions and 61 deletions
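
For context on what the new test pins down: a bundle variable of type "complex" can hold a list, and a target-level override replaces the entire list rather than merging it element by element. A minimal sketch of the mechanism, assuming the standard bundle variables syntax; names and paths are illustrative, condensed from the files changed below:

    variables:
      libraries:
        type: complex                # complex variables may hold lists or maps
        default:
          - jar: "/path/to/jar"
          - egg: "/path/to/egg"

    targets:
      dev:
        variables:
          libraries:                 # the override replaces the whole list,
            - jar: "/newpath/to/jar" # so the default's egg entry is absent in dev

In the dev target, resolving ${var.libraries} therefore yields only the single overridden entry.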

File 1 of 5: the bundle configuration (YAML).

@@ -11,6 +11,7 @@ resources:
       - task_key: test
         job_cluster_key: key
         libraries: ${variables.libraries.value}
+        # specific fields of complex variable are referenced:
         task_key: "task with spark version ${var.cluster.spark_version} and jar ${var.libraries[0].jar}"
 
 variables:
@@ -35,14 +36,6 @@ variables:
       - jar: "/path/to/jar"
       - egg: "/path/to/egg"
       - whl: "/path/to/whl"
-  complexvar:
-    type: complex
-    description: "A complex variable"
-    default:
-      key1: "value1"
-      key2: "value2"
-      key3: "value3"
-
 
 targets:
   default:
@@ -51,15 +44,13 @@ targets:
     variables:
       node_type: "Standard_DS3_v3"
       cluster:
+        # complex variables are not merged, so missing variables (policy_id) are not inherited
         spark_version: "14.2.x-scala2.11"
         node_type_id: ${var.node_type}
         num_workers: 4
         spark_conf:
           spark.speculation: false
           spark.databricks.delta.retentionDurationCheck.enabled: false
-      complexvar:
-        type: complex
-        default:
-          key1: "1"
-          key2: "2"
-          key3: "3"
+      libraries:
+        - jar: "/newpath/to/jar"
+        - whl: "/newpath/to/whl"
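
The comment added in the hunk above ("complex variables are not merged") is the second behavior this test exercises: overriding a complex map variable likewise replaces the whole map, so fields set only in the default are dropped rather than inherited. A minimal sketch under the same assumed syntax; the policy_id value is hypothetical, chosen to mirror the null check in the test output below:

    variables:
      cluster:
        type: complex
        default:
          spark_version: "13.2.x-scala2.11"
          policy_id: "0123456789abcdef"  # hypothetical value, set only in the default

    targets:
      dev:
        variables:
          cluster:                       # replaces the whole map
            spark_version: "14.2.x-scala2.11"
            # no policy_id here, and none is inherited, so it resolves to null in dev

Individual fields and elements of complex variables remain addressable, e.g. ${var.cluster.spark_version} and ${var.libraries[0].jar} in the task_key above.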

File 2 of 5: expected validation output for the default target (JSON).

@@ -76,20 +76,6 @@
         "spark_version": "13.2.x-scala2.11"
       }
     },
-    "complexvar": {
-      "default": {
-        "key1": "value1",
-        "key2": "value2",
-        "key3": "value3"
-      },
-      "description": "A complex variable",
-      "type": "complex",
-      "value": {
-        "key1": "value1",
-        "key2": "value2",
-        "key3": "value3"
-      }
-    },
     "libraries": {
       "default": [
         {

File 3 of 5: expected validation output for the dev target (JSON).

@@ -32,16 +32,13 @@
           "job_cluster_key": "key",
           "libraries": [
             {
-              "jar": "/path/to/jar"
+              "jar": "/newpath/to/jar"
             },
             {
-              "egg": "/path/to/egg"
-            },
-            {
-              "whl": "/path/to/whl"
+              "whl": "/newpath/to/whl"
             }
           ],
-          "task_key": "task with spark version 14.2.x-scala2.11 and jar /path/to/jar"
+          "task_key": "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
         }
       ]
     }
@@ -70,43 +67,23 @@
         "spark_version": "14.2.x-scala2.11"
       }
     },
-    "complexvar": {
-      "default": {
-        "key1": "1",
-        "key2": "2",
-        "key3": "3"
-      },
-      "description": "A complex variable",
-      "type": "complex",
-      "value": {
-        "key1": "1",
-        "key2": "2",
-        "key3": "3"
-      }
-    },
     "libraries": {
       "default": [
         {
-          "jar": "/path/to/jar"
+          "jar": "/newpath/to/jar"
         },
         {
-          "egg": "/path/to/egg"
-        },
-        {
-          "whl": "/path/to/whl"
+          "whl": "/newpath/to/whl"
         }
       ],
       "description": "A libraries definition",
       "type": "complex",
       "value": [
         {
-          "jar": "/path/to/jar"
+          "jar": "/newpath/to/jar"
         },
        {
-          "egg": "/path/to/egg"
-        },
-        {
-          "whl": "/path/to/whl"
+          "whl": "/newpath/to/whl"
         }
       ]
     },

File 4 of 5: recorded test output (new file).

@@ -0,0 +1,14 @@
+>>> $CLI bundle validate -o json
+
+>>> jq .resources.jobs.my_job.tasks[0].task_key out.default.json
+"task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
+
+>>> $CLI bundle validate -o json -t dev
+
+>>> jq .resources.jobs.my_job.tasks[0].task_key out.dev.json
+"task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
+
+policy_id and spark_conf.spark_random fields do not exist in dev target:
+
+>>> jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json
+null

File 5 of 5: the test script (shell).

@@ -1,4 +1,8 @@
-$CLI bundle validate -o json | jq '{resources,variables}' > out.default.json
+trace $CLI bundle validate -o json | jq '{resources,variables}' > out.default.json
+trace jq .resources.jobs.my_job.tasks[0].task_key out.default.json | grep "task with spark version 13.2.x-scala2.11 and jar /path/to/jar"
 
-# spark.random and policy_id should be empty in this target:
-$CLI bundle validate -o json -t dev | jq '{resources,variables}' > out.dev.json
+trace $CLI bundle validate -o json -t dev | jq '{resources,variables}' > out.dev.json
+trace jq .resources.jobs.my_job.tasks[0].task_key out.dev.json | grep "task with spark version 14.2.x-scala2.11 and jar /newpath/to/jar"
+
+echo policy_id and spark_conf.spark_random fields do not exist in dev target:
+trace jq .resources.jobs.my_job.job_clusters[0].new_cluster.policy_id out.dev.json | grep null