mirror of https://github.com/databricks/cli.git

Compare commits: 94d8c3ba1e ... 4e8e027380 (2 commits)

Author | SHA1 | Date
---|---|---
shreyas-goenka | 4e8e027380 | |
Andrew Nester | 66f2ba64a8 | |
@@ -406,7 +406,14 @@ func (r *Root) MergeTargetOverrides(name string) error {
 	return r.updateWithDynamicValue(root)
 }
 
-var variableKeywords = []string{"default", "lookup"}
+var allowedVariableDefinitions = []([]string){
+	{"default", "type", "description"},
+	{"default", "type"},
+	{"default", "description"},
+	{"lookup", "description"},
+	{"default"},
+	{"lookup"},
+}
 
 // isFullVariableOverrideDef checks if the given value is a full syntax varaible override.
 // A full syntax variable override is a map with either 1 of 2 keys.
@@ -423,42 +430,21 @@ func isFullVariableOverrideDef(v dyn.Value) bool {
 		return false
 	}
 
-	// If the map has 3 keys, they should be "description", "type" and "default" or "lookup"
-	if mv.Len() == 3 {
-		if _, ok := mv.GetByString("type"); ok {
-			if _, ok := mv.GetByString("description"); ok {
-				if _, ok := mv.GetByString("default"); ok {
-					return true
-				}
-			}
-		}
-
-		return false
-	}
-
-	// If the map has 2 keys, one of them should be "default" or "lookup" and the other is "type" or "description"
-	if mv.Len() == 2 {
-		if _, ok := mv.GetByString("type"); ok {
-			if _, ok := mv.GetByString("default"); ok {
-				return true
-			}
-		}
-
-		if _, ok := mv.GetByString("description"); ok {
-			if _, ok := mv.GetByString("default"); ok {
-				return true
-			}
-
-			if _, ok := mv.GetByString("lookup"); ok {
-				return true
-			}
-		}
-
-		return false
-	}
-
-	for _, keyword := range variableKeywords {
-		if _, ok := mv.GetByString(keyword); ok {
+	for _, keys := range allowedVariableDefinitions {
+		if len(keys) != mv.Len() {
+			continue
+		}
+
+		// Check if the keys are the same.
+		match := true
+		for _, key := range keys {
+			if _, ok := mv.GetByString(key); !ok {
+				match = false
+				break
+			}
+		}
+
+		if match {
 			return true
 		}
 	}
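For illustration only, a minimal standalone sketch of the key-set matching that the `allowedVariableDefinitions` table above enables. It re-implements the check over a plain Go map rather than the CLI's `dyn.Value`, and the `isFullOverride` helper name is hypothetical, not part of the CLI:

```go
package main

import "fmt"

// Allowed key combinations, mirroring allowedVariableDefinitions in the diff.
var allowed = [][]string{
	{"default", "type", "description"},
	{"default", "type"},
	{"default", "description"},
	{"lookup", "description"},
	{"default"},
	{"lookup"},
}

// isFullOverride is a simplified stand-in for isFullVariableOverrideDef:
// it reports whether the map's keys exactly match one allowed combination.
func isFullOverride(m map[string]any) bool {
	for _, keys := range allowed {
		if len(keys) != len(m) {
			continue
		}
		match := true
		for _, key := range keys {
			if _, ok := m[key]; !ok {
				match = false
				break
			}
		}
		if match {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(isFullOverride(map[string]any{"default": 42, "description": "x"})) // true: {default, description} is allowed
	fmt.Println(isFullOverride(map[string]any{"lookup": "id", "type": "string"}))  // false: {lookup, type} is not in the list
}
```

Under this table-driven check, only key sets that appear verbatim in `allowedVariableDefinitions` count as full-syntax overrides; a pairing such as `lookup` with `type` falls through to `return false`.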
@@ -4,6 +4,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"sort"
 
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/resources"
@@ -82,6 +83,10 @@ func BundleToTerraform(config *config.Root) *schema.Root {
 		conv(src, &dst)
 
 		if src.JobSettings != nil {
+			sort.Slice(src.JobSettings.Tasks, func(i, j int) bool {
+				return src.JobSettings.Tasks[i].TaskKey < src.JobSettings.Tasks[j].TaskKey
+			})
+
 			for _, v := range src.Tasks {
 				var t schema.ResourceJobTask
 				conv(v, &t)
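A minimal sketch of the sorting pattern added to `BundleToTerraform` above, applied to a stand-in struct; the `task` type and key values below are illustrative, not the CLI's `jobs.Task`:

```go
package main

import (
	"fmt"
	"sort"
)

// task is a stand-in for jobs.Task; only the field used for sorting is kept.
type task struct {
	TaskKey string
}

func main() {
	tasks := []task{{"task_key_b"}, {"task_key_c"}, {"task_key_a"}}

	// Same comparator shape as the diff: order tasks lexicographically by
	// TaskKey so the generated Terraform configuration is deterministic.
	sort.Slice(tasks, func(i, j int) bool {
		return tasks[i].TaskKey < tasks[j].TaskKey
	})

	fmt.Println(tasks) // [{task_key_a} {task_key_b} {task_key_c}]
}
```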
@@ -3,6 +3,7 @@ package tfdyn
 import (
 	"context"
 	"fmt"
+	"sort"
 
 	"github.com/databricks/cli/bundle/internal/tf/schema"
 	"github.com/databricks/cli/libs/dyn"
@@ -19,8 +20,38 @@ func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
 		log.Debugf(ctx, "job normalization diagnostic: %s", diag.Summary)
 	}
 
+	// Sort the tasks of each job in the bundle by task key. Sorting
+	// the task keys ensures that the diff computed by terraform is correct and avoids
+	// recreates. For more details see the NOTE at
+	// https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/job#example-usage
+	// and https://github.com/databricks/terraform-provider-databricks/issues/4011
+	// and https://github.com/databricks/cli/pull/1776
+	vout := vin
+	var err error
+	tasks, ok := vin.Get("tasks").AsSequence()
+	if ok {
+		sort.Slice(tasks, func(i, j int) bool {
+			// We sort the tasks by their task key. Tasks without task keys are ordered
+			// before tasks with task keys. We do not error for those tasks
+			// since presence of a task_key is validated for in the Jobs backend.
+			tk1, ok := tasks[i].Get("task_key").AsString()
+			if !ok {
+				return true
+			}
+			tk2, ok := tasks[j].Get("task_key").AsString()
+			if !ok {
+				return false
+			}
+			return tk1 < tk2
+		})
+		vout, err = dyn.Set(vin, "tasks", dyn.V(tasks))
+		if err != nil {
+			return dyn.InvalidValue, err
+		}
+	}
+
 	// Modify top-level keys.
-	vout, err := renameKeys(vin, map[string]string{
+	vout, err = renameKeys(vout, map[string]string{
 		"tasks":        "task",
 		"job_clusters": "job_cluster",
 		"parameters":   "parameter",
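The comparator added in `convertJobResource` orders tasks without a `task_key` before tasks that have one, and otherwise compares keys lexicographically. A standalone sketch of that ordering rule, with a nil pointer standing in for a missing `task_key` (the real code reads the key from a `dyn.Value` sequence; plain Go values are used here):

```go
package main

import (
	"fmt"
	"sort"
)

func main() {
	key := func(s string) *string { return &s }
	// nil stands in for a task that has no task_key.
	tasks := []*string{key("task_key_c"), nil, key("task_key_b"), key("task_key_a")}

	sort.Slice(tasks, func(i, j int) bool {
		// Missing keys sort first, mirroring the comparator in the diff:
		// if tasks[i] has no key it goes before tasks[j]; if only tasks[j]
		// has no key, tasks[i] is not placed before it.
		if tasks[i] == nil {
			return true
		}
		if tasks[j] == nil {
			return false
		}
		return *tasks[i] < *tasks[j]
	})

	for _, t := range tasks {
		if t == nil {
			fmt.Println("(no task_key)")
			continue
		}
		fmt.Println(*t)
	}
	// Prints, in order: (no task_key), task_key_a, task_key_b, task_key_c
}
```

Because the sorted sequence is written back with `dyn.Set`, the subsequent `renameKeys` call operates on `vout` (the reordered value) rather than the original `vin`, which is why its first argument changes in the hunk above.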
@@ -42,8 +42,8 @@ func TestConvertJob(t *testing.T) {
 			},
 			Tasks: []jobs.Task{
 				{
-					TaskKey:       "task_key",
-					JobClusterKey: "job_cluster_key",
+					TaskKey:       "task_key_b",
+					JobClusterKey: "job_cluster_key_b",
 					Libraries: []compute.Library{
 						{
 							Pypi: &compute.PythonPyPiLibrary{
@@ -55,6 +55,17 @@ func TestConvertJob(t *testing.T) {
 						},
 					},
 				},
+				{
+					TaskKey:       "task_key_a",
+					JobClusterKey: "job_cluster_key_a",
+				},
+				{
+					TaskKey:       "task_key_c",
+					JobClusterKey: "job_cluster_key_c",
+				},
+				{
+					Description: "missing task key 😱",
+				},
 			},
 		},
 		Permissions: []resources.Permission{
@@ -100,8 +111,15 @@ func TestConvertJob(t *testing.T) {
 			},
 			"task": []any{
 				map[string]any{
-					"task_key":        "task_key",
-					"job_cluster_key": "job_cluster_key",
+					"description": "missing task key 😱",
+				},
+				map[string]any{
+					"task_key":        "task_key_a",
+					"job_cluster_key": "job_cluster_key_a",
+				},
+				map[string]any{
+					"task_key":        "task_key_b",
+					"job_cluster_key": "job_cluster_key_b",
 					"library": []any{
 						map[string]any{
 							"pypi": map[string]any{
@@ -113,6 +131,10 @@ func TestConvertJob(t *testing.T) {
 						},
 					},
 				},
+				map[string]any{
+					"task_key":        "task_key_c",
+					"job_cluster_key": "job_cluster_key_c",
+				},
 			},
 		}, out.Job["my_job"])
 