mirror of https://github.com/databricks/cli.git
Merge job parameters based on their name (#1659)
## Changes

This change enables overriding the default value of job parameters in target overrides. This is the same approach we already take for job clusters and job tasks. Closes #1620.

## Tests

Mutator unit tests and lightweight end-to-end tests.
This commit is contained in:
parent
d26f3f4863
commit
f3ffded3bf
|
@ -0,0 +1,45 @@
|
|||
package mutator
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/databricks/cli/bundle"
|
||||
"github.com/databricks/cli/libs/diag"
|
||||
"github.com/databricks/cli/libs/dyn"
|
||||
"github.com/databricks/cli/libs/dyn/merge"
|
||||
)
|
||||
|
||||
type mergeJobParameters struct{}
|
||||
|
||||
func MergeJobParameters() bundle.Mutator {
|
||||
return &mergeJobParameters{}
|
||||
}
|
||||
|
||||
func (m *mergeJobParameters) Name() string {
|
||||
return "MergeJobParameters"
|
||||
}
|
||||
|
||||
func (m *mergeJobParameters) parameterNameString(v dyn.Value) string {
|
||||
switch v.Kind() {
|
||||
case dyn.KindInvalid, dyn.KindNil:
|
||||
return ""
|
||||
case dyn.KindString:
|
||||
return v.MustString()
|
||||
default:
|
||||
panic("task key must be a string")
|
||||
}
|
||||
}
|
||||
|
||||
func (m *mergeJobParameters) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
|
||||
err := b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) {
|
||||
if v.Kind() == dyn.KindNil {
|
||||
return v, nil
|
||||
}
|
||||
|
||||
return dyn.Map(v, "resources.jobs", dyn.Foreach(func(_ dyn.Path, job dyn.Value) (dyn.Value, error) {
|
||||
return dyn.Map(job, "parameters", merge.ElementsByKey("name", m.parameterNameString))
|
||||
}))
|
||||
})
|
||||
|
||||
return diag.FromErr(err)
|
||||
}
|
|
@ -0,0 +1,80 @@
|
|||
package mutator_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/databricks/cli/bundle"
|
||||
"github.com/databricks/cli/bundle/config"
|
||||
"github.com/databricks/cli/bundle/config/mutator"
|
||||
"github.com/databricks/cli/bundle/config/resources"
|
||||
"github.com/databricks/databricks-sdk-go/service/jobs"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestMergeJobParameters(t *testing.T) {
|
||||
b := &bundle.Bundle{
|
||||
Config: config.Root{
|
||||
Resources: config.Resources{
|
||||
Jobs: map[string]*resources.Job{
|
||||
"foo": {
|
||||
JobSettings: &jobs.JobSettings{
|
||||
Parameters: []jobs.JobParameterDefinition{
|
||||
{
|
||||
Name: "foo",
|
||||
Default: "v1",
|
||||
},
|
||||
{
|
||||
Name: "bar",
|
||||
Default: "v1",
|
||||
},
|
||||
{
|
||||
Name: "foo",
|
||||
Default: "v2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
diags := bundle.Apply(context.Background(), b, mutator.MergeJobParameters())
|
||||
assert.NoError(t, diags.Error())
|
||||
|
||||
j := b.Config.Resources.Jobs["foo"]
|
||||
|
||||
assert.Len(t, j.Parameters, 2)
|
||||
assert.Equal(t, "foo", j.Parameters[0].Name)
|
||||
assert.Equal(t, "v2", j.Parameters[0].Default)
|
||||
assert.Equal(t, "bar", j.Parameters[1].Name)
|
||||
assert.Equal(t, "v1", j.Parameters[1].Default)
|
||||
}
|
||||
|
||||
func TestMergeJobParametersWithNilKey(t *testing.T) {
|
||||
b := &bundle.Bundle{
|
||||
Config: config.Root{
|
||||
Resources: config.Resources{
|
||||
Jobs: map[string]*resources.Job{
|
||||
"foo": {
|
||||
JobSettings: &jobs.JobSettings{
|
||||
Parameters: []jobs.JobParameterDefinition{
|
||||
{
|
||||
Default: "v1",
|
||||
},
|
||||
{
|
||||
Default: "v2",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
diags := bundle.Apply(context.Background(), b, mutator.MergeJobParameters())
|
||||
assert.NoError(t, diags.Error())
|
||||
assert.Len(t, b.Config.Resources.Jobs["foo"].Parameters, 1)
|
||||
}
|
|
@ -21,6 +21,7 @@ func Initialize() bundle.Mutator {
|
|||
[]bundle.Mutator{
|
||||
mutator.RewriteSyncPaths(),
|
||||
mutator.MergeJobClusters(),
|
||||
mutator.MergeJobParameters(),
|
||||
mutator.MergeJobTasks(),
|
||||
mutator.MergePipelineClusters(),
|
||||
mutator.InitializeWorkspaceClient(),
|
||||
|
|
|
@ -37,6 +37,7 @@ func loadTargetWithDiags(path, env string) (*bundle.Bundle, diag.Diagnostics) {
|
|||
phases.LoadNamedTarget(env),
|
||||
mutator.RewriteSyncPaths(),
|
||||
mutator.MergeJobClusters(),
|
||||
mutator.MergeJobParameters(),
|
||||
mutator.MergeJobTasks(),
|
||||
mutator.MergePipelineClusters(),
|
||||
))
|
||||
|
|
|
@ -0,0 +1,32 @@
|
|||
# Fixture for the job parameter override tests: the base job defines two
# parameters and each target overrides the default value of one of them.
bundle:
  name: override_job_parameters

workspace:
  host: https://acme.cloud.databricks.com/

resources:
  jobs:
    foo:
      name: job
      parameters:
        - name: foo
          default: v1
        - name: bar
          default: v1

targets:
  # Overrides the default of parameter "foo" only; "bar" keeps v1.
  development:
    resources:
      jobs:
        foo:
          parameters:
            - name: foo
              default: v2

  # Overrides the default of parameter "bar" only; "foo" keeps v1.
  staging:
    resources:
      jobs:
        foo:
          parameters:
            - name: bar
              default: v2
|
@ -0,0 +1,31 @@
|
|||
package config_tests
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestOverrideJobParametersDev(t *testing.T) {
|
||||
b := loadTarget(t, "./override_job_parameters", "development")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
|
||||
p := b.Config.Resources.Jobs["foo"].Parameters
|
||||
assert.Len(t, p, 2)
|
||||
assert.Equal(t, "foo", p[0].Name)
|
||||
assert.Equal(t, "v2", p[0].Default)
|
||||
assert.Equal(t, "bar", p[1].Name)
|
||||
assert.Equal(t, "v1", p[1].Default)
|
||||
}
|
||||
|
||||
func TestOverrideJobParametersStaging(t *testing.T) {
|
||||
b := loadTarget(t, "./override_job_parameters", "staging")
|
||||
assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name)
|
||||
|
||||
p := b.Config.Resources.Jobs["foo"].Parameters
|
||||
assert.Len(t, p, 2)
|
||||
assert.Equal(t, "foo", p[0].Name)
|
||||
assert.Equal(t, "v1", p[0].Default)
|
||||
assert.Equal(t, "bar", p[1].Name)
|
||||
assert.Equal(t, "v2", p[1].Default)
|
||||
}
|
Loading…
Reference in New Issue