package mutator_test

import (
	"context"
	"testing"

	"github.com/databricks/cli/bundle"
	"github.com/databricks/cli/bundle/config"
	"github.com/databricks/cli/bundle/config/mutator"
	"github.com/databricks/cli/bundle/config/resources"
	"github.com/databricks/databricks-sdk-go/service/compute"
	"github.com/databricks/databricks-sdk-go/service/jobs"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

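// TestOverrideComputeModeDevelopment verifies that in development mode the
// bundle-level cluster_id override is applied to every task: new clusters,
// existing cluster references, environment keys, and job cluster keys are all
// replaced by the override cluster ID.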
func TestOverrideComputeModeDevelopment(t *testing.T) {
	t.Setenv("DATABRICKS_CLUSTER_ID", "")
	b := &bundle.Bundle{
		Config: config.Root{
			Bundle: config.Bundle{
				Mode:      config.Development,
				ClusterId: "newClusterID",
			},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								NewCluster: &compute.ClusterSpec{
									SparkVersion: "14.2.x-scala2.12",
								},
							},
							{
								ExistingClusterId: "cluster2",
							},
							{
								EnvironmentKey: "environment_key",
							},
							{
								JobClusterKey: "cluster_key",
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.NoError(t, diags.Error())
	assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster)
	assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
	assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId)
	assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[2].ExistingClusterId)
	assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[3].ExistingClusterId)

	assert.Nil(t, b.Config.Resources.Jobs["job1"].Tasks[0].NewCluster)
	assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[2].EnvironmentKey)
	assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey)
}

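// TestOverrideComputeModeDefault verifies that when no mode is set, the
// DATABRICKS_CLUSTER_ID environment variable overrides the compute for all
// tasks without producing diagnostics.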
func TestOverrideComputeModeDefault(t *testing.T) {
	t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId")
	b := &bundle.Bundle{
		Config: config.Root{
			Bundle: config.Bundle{
				Mode: "",
			},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								NewCluster: &compute.ClusterSpec{},
							},
							{
								ExistingClusterId: "cluster2",
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.Empty(t, diags)
	assert.Equal(t, "newClusterId", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
	assert.Equal(t, "newClusterId", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId)
}

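// TestOverrideComputePipelineTask verifies that pipeline tasks are left
// untouched: no existing cluster ID is assigned to them.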
func TestOverrideComputePipelineTask(t *testing.T) {
	t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId")
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								PipelineTask: &jobs.PipelineTask{},
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.NoError(t, diags.Error())
	assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
}

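// TestOverrideComputeForEachTask verifies that the nested task of a
// for_each_task remains empty and does not receive a cluster override.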
func TestOverrideComputeForEachTask(t *testing.T) {
	t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId")
	b := &bundle.Bundle{
		Config: config.Root{
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								ForEachTask: &jobs.ForEachTask{},
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.NoError(t, diags.Error())
	assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ForEachTask.Task)
}

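// TestOverrideComputeModeProduction verifies that setting cluster_id on a
// 'mode: production' target still applies the override, but emits a
// diagnostic recommending against it.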
func TestOverrideComputeModeProduction(t *testing.T) {
	b := &bundle.Bundle{
		Config: config.Root{
			Bundle: config.Bundle{
				Mode:      config.Production,
				ClusterId: "newClusterID",
			},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								NewCluster: &compute.ClusterSpec{},
							},
							{
								ExistingClusterId: "cluster2",
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.Len(t, diags, 1)
	assert.Equal(t, "overriding compute for a target that uses 'mode: production' is not recommended", diags[0].Summary)
	assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId)
}

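// TestOverrideComputeModeProductionIgnoresVariable verifies that the
// DATABRICKS_CLUSTER_ID variable is ignored for 'mode: production' targets:
// a diagnostic is emitted and the existing cluster reference is preserved.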
func TestOverrideComputeModeProductionIgnoresVariable(t *testing.T) {
	t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId")
	b := &bundle.Bundle{
		Config: config.Root{
			Bundle: config.Bundle{
				Mode: config.Production,
			},
			Resources: config.Resources{
				Jobs: map[string]*resources.Job{
					"job1": {JobSettings: &jobs.JobSettings{
						Name: "job1",
						Tasks: []jobs.Task{
							{
								NewCluster: &compute.ClusterSpec{},
							},
							{
								ExistingClusterId: "cluster2",
							},
						},
					}},
				},
			},
		},
	}

	m := mutator.OverrideCompute()
	diags := bundle.Apply(context.Background(), b, m)
	require.Len(t, diags, 1)
	assert.Equal(t, "the DATABRICKS_CLUSTER_ID variable is set but is ignored since the current target uses 'mode: production'", diags[0].Summary)
	assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId)
}