mirror of https://github.com/databricks/cli.git
Compare commits
No commits in common. "382a4efd6efb98adbc7532a3286d81521c899843" and "4233a7c2923c3acc5fc81e7cb6f0e45c4967dc2a" have entirely different histories.
382a4efd6e...4233a7c292

In the hunks below, lines prefixed with "-" come from 382a4efd6e and lines prefixed with "+" come from 4233a7c292.
@@ -67,31 +67,31 @@ func showSingleNodeClusterWarning(ctx context.Context, v dyn.Value) bool {
     profile, ok := conf.SparkConf["spark.databricks.cluster.profile"]
     if !ok {
-        log.Debugf(ctx, "spark_conf spark.databricks.cluster.profile not found in single-node cluster spec")
+        log.Warnf(ctx, "spark_conf spark.databricks.cluster.profile not found in single-node cluster spec")
         return true
     }
     if profile != "singleNode" {
-        log.Debugf(ctx, "spark_conf spark.databricks.cluster.profile is not singleNode in single-node cluster spec: %s", profile)
+        log.Warnf(ctx, "spark_conf spark.databricks.cluster.profile is not singleNode in single-node cluster spec: %s", profile)
         return true
     }

     master, ok := conf.SparkConf["spark.master"]
     if !ok {
-        log.Debugf(ctx, "spark_conf spark.master not found in single-node cluster spec")
+        log.Warnf(ctx, "spark_conf spark.master not found in single-node cluster spec")
         return true
     }
     if !strings.HasPrefix(master, "local") {
-        log.Debugf(ctx, "spark_conf spark.master does not start with local in single-node cluster spec: %s", master)
+        log.Warnf(ctx, "spark_conf spark.master does not start with local in single-node cluster spec: %s", master)
         return true
     }

     resourceClass, ok := conf.CustomTags["ResourceClass"]
     if !ok {
-        log.Debugf(ctx, "custom_tag ResourceClass not found in single-node cluster spec")
+        log.Warnf(ctx, "custom_tag ResourceClass not found in single-node cluster spec")
         return true
     }
     if resourceClass != "SingleNode" {
-        log.Debugf(ctx, "custom_tag ResourceClass is not SingleNode in single-node cluster spec: %s", resourceClass)
+        log.Warnf(ctx, "custom_tag ResourceClass is not SingleNode in single-node cluster spec: %s", resourceClass)
         return true
     }

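The checks above treat a cluster spec as single-node only when spark.databricks.cluster.profile is "singleNode", spark.master starts with "local", and the ResourceClass custom tag is "SingleNode". A minimal sketch of a spec that satisfies all three checks, built from the same compute.ClusterSpec fields the tests in this diff use (the SDK import path is an assumption, not taken from this page):

package main

import (
    "fmt"

    // Assumed import path for the SDK compute types used by the tests below.
    "github.com/databricks/databricks-sdk-go/service/compute"
)

func main() {
    // A spec that passes every check in showSingleNodeClusterWarning: the profile
    // is "singleNode", spark.master is local, and the ResourceClass custom tag is
    // "SingleNode", so no single-node warning would be emitted for it.
    spec := compute.ClusterSpec{
        ClusterName: "my_cluster",
        SparkConf: map[string]string{
            "spark.databricks.cluster.profile": "singleNode",
            "spark.master":                     "local[*]",
        },
        CustomTags: map[string]string{
            "ResourceClass": "SingleNode",
        },
    }
    fmt.Println(spec.SparkConf, spec.CustomTags)
}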
@@ -108,8 +108,6 @@ func (m *singleNodeCluster) Apply(ctx context.Context, rb bundle.ReadOnlyBundle)
         dyn.NewPattern(dyn.Key("resources"), dyn.Key("jobs"), dyn.AnyKey(), dyn.Key("job_clusters"), dyn.AnyIndex(), dyn.Key("new_cluster")),
         // Job task clusters
         dyn.NewPattern(dyn.Key("resources"), dyn.Key("jobs"), dyn.AnyKey(), dyn.Key("tasks"), dyn.AnyIndex(), dyn.Key("new_cluster")),
-        // Job for each task clusters
-        dyn.NewPattern(dyn.Key("resources"), dyn.Key("jobs"), dyn.AnyKey(), dyn.Key("tasks"), dyn.AnyIndex(), dyn.Key("for_each_task"), dyn.Key("task"), dyn.Key("new_cluster")),
         // Pipeline clusters
         dyn.NewPattern(dyn.Key("resources"), dyn.Key("pipelines"), dyn.AnyKey(), dyn.Key("clusters"), dyn.AnyIndex()),
     }
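The two "-" lines above are the only difference in this hunk: a pattern for clusters nested under a job's for_each_task. A small sketch of how that pattern corresponds to the config path asserted on in the for-each-task tests further down (the dyn calls are copied from this diff; the libs/dyn import path is an assumption):

package main

import (
    "fmt"

    // Assumed import path for the dynamic-value helpers used throughout this diff.
    "github.com/databricks/cli/libs/dyn"
)

func main() {
    // Pattern for clusters defined inside a job's for_each_task, as on the "-" side above.
    pattern := dyn.NewPattern(
        dyn.Key("resources"), dyn.Key("jobs"), dyn.AnyKey(),
        dyn.Key("tasks"), dyn.AnyIndex(),
        dyn.Key("for_each_task"), dyn.Key("task"), dyn.Key("new_cluster"),
    )
    _ = pattern

    // One concrete path the pattern is meant to cover; the same string appears in
    // the removed for-each-task test below.
    fmt.Println(dyn.MustPathFromString("resources.jobs.foo.tasks[0].for_each_task.task.new_cluster"))
}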
@@ -16,12 +16,8 @@ import (
     "github.com/stretchr/testify/assert"
 )

-func failCases() []struct {
-    name       string
-    sparkConf  map[string]string
-    customTags map[string]string
-} {
-    return []struct {
+func TestValidateSingleNodeClusterFail(t *testing.T) {
+    failCases := []struct {
         name       string
         sparkConf  map[string]string
         customTags map[string]string
@@ -87,13 +83,12 @@ func failCases() []struct {
             customTags: map[string]string{"ResourceClass": "SingleNode"},
         },
     }
-}
-
-func TestValidateSingleNodeClusterFailForInteractiveClusters(t *testing.T) {
+
     ctx := context.Background()

-    for _, tc := range failCases() {
-        t.Run(tc.name, func(t *testing.T) {
+    // Interactive clusters.
+    for _, tc := range failCases {
+        t.Run("interactive_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -128,13 +123,10 @@ func TestValidateSingleNodeClusterFailForInteractiveClusters(t *testing.T) {
             }, diags)
         })
     }
-}
-
-func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range failCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Job clusters.
+    for _, tc := range failCases {
+        t.Run("job_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -178,13 +170,10 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) {

         })
     }
-}
-
-func TestValidateSingleNodeClusterFailForJobTaskClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range failCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Job task clusters.
+    for _, tc := range failCases {
+        t.Run("task_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -227,13 +216,10 @@ func TestValidateSingleNodeClusterFailForJobTaskClusters(t *testing.T) {
             }, diags)
         })
     }
-}
-
-func TestValidateSingleNodeClusterFailForPipelineClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range failCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Pipeline clusters.
+    for _, tc := range failCases {
+        t.Run("pipeline_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -273,72 +259,14 @@ func TestValidateSingleNodeClusterFailForPipelineClusters(t *testing.T) {
             }, diags)
         })
     }

 }

-func TestValidateSingleNodeClusterFailForJobForEachTaskCluster(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range failCases() {
-        t.Run(tc.name, func(t *testing.T) {
-            b := &bundle.Bundle{
-                Config: config.Root{
-                    Resources: config.Resources{
-                        Jobs: map[string]*resources.Job{
-                            "foo": {
-                                JobSettings: &jobs.JobSettings{
-                                    Tasks: []jobs.Task{
-                                        {
-                                            ForEachTask: &jobs.ForEachTask{
-                                                Task: jobs.Task{
-                                                    NewCluster: &compute.ClusterSpec{
-                                                        ClusterName: "my_cluster",
-                                                        SparkConf:   tc.sparkConf,
-                                                        CustomTags:  tc.customTags,
-                                                    },
-                                                },
-                                            },
-                                        },
-                                    },
-                                },
-                            },
-                        },
-                    },
-                },
-            }
-
-            bundletest.SetLocation(b, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster", []dyn.Location{{File: "e.yml", Line: 1, Column: 1}})
-
-            // We can't set num_workers to 0 explicitly in the typed configuration.
-            // Do it on the dyn.Value directly.
-            bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) {
-                return dyn.Set(v, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster.num_workers", dyn.V(0))
-            })
-
-            diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster())
-            assert.Equal(t, diag.Diagnostics{
-                {
-                    Severity:  diag.Warning,
-                    Summary:   singleNodeWarningSummary,
-                    Detail:    singleNodeWarningDetail,
-                    Locations: []dyn.Location{{File: "e.yml", Line: 1, Column: 1}},
-                    Paths:     []dyn.Path{dyn.MustPathFromString("resources.jobs.foo.tasks[0].for_each_task.task.new_cluster")},
-                },
-            }, diags)
-        })
-    }
-}
-
-func passCases() []struct {
-    name       string
-    numWorkers *int
-    sparkConf  map[string]string
-    customTags map[string]string
-    policyId   string
-} {
+func TestValidateSingleNodeClusterPass(t *testing.T) {
     zero := 0
     one := 1

-    return []struct {
+    passCases := []struct {
         name       string
         numWorkers *int
         sparkConf  map[string]string
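The comment in the removed test above ("We can't set num_workers to 0 explicitly in the typed configuration") points at a Go zero-value limitation: a typed NumWorkers of 0 is indistinguishable from a field that was never set, which is why the test writes the value onto the dyn.Value instead. A self-contained illustration of that limitation, using a hypothetical struct that is not from the codebase:

package main

import "fmt"

// Hypothetical struct mirroring the shape of the typed cluster spec.
type clusterSpec struct {
    NumWorkers int `json:"num_workers,omitempty"`
}

func main() {
    explicitZero := clusterSpec{NumWorkers: 0}
    unset := clusterSpec{}

    // Both values compare equal: the typed layer cannot express "explicitly zero",
    // so the tests set num_workers on the dynamic configuration instead.
    fmt.Println(explicitZero == unset) // true
}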
@@ -369,13 +297,12 @@ func passCases() []struct {
             numWorkers: &zero,
         },
     }
-}
-
-func TestValidateSingleNodeClusterPassInteractiveClusters(t *testing.T) {
+
     ctx := context.Background()

-    for _, tc := range passCases() {
-        t.Run(tc.name, func(t *testing.T) {
+    // Interactive clusters.
+    for _, tc := range passCases {
+        t.Run("interactive_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -402,13 +329,10 @@ func TestValidateSingleNodeClusterPassInteractiveClusters(t *testing.T) {
             assert.Empty(t, diags)
         })
     }
-}
-
-func TestValidateSingleNodeClusterPassJobClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range passCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Job clusters.
+    for _, tc := range passCases {
+        t.Run("job_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -442,13 +366,10 @@ func TestValidateSingleNodeClusterPassJobClusters(t *testing.T) {
             assert.Empty(t, diags)
         })
     }
-}
-
-func TestValidateSingleNodeClusterPassJobTaskClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range passCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Job task clusters.
+    for _, tc := range passCases {
+        t.Run("task_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -482,13 +403,10 @@ func TestValidateSingleNodeClusterPassJobTaskClusters(t *testing.T) {
             assert.Empty(t, diags)
         })
     }
-}
-
-func TestValidateSingleNodeClusterPassPipelineClusters(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range passCases() {
-        t.Run(tc.name, func(t *testing.T) {
+
+    // Pipeline clusters.
+    for _, tc := range passCases {
+        t.Run("pipeline_"+tc.name, func(t *testing.T) {
             b := &bundle.Bundle{
                 Config: config.Root{
                     Resources: config.Resources{
@@ -520,47 +438,3 @@ func TestValidateSingleNodeClusterPassPipelineClusters(t *testing.T) {
         })
     }
 }
-
-func TestValidateSingleNodeClusterPassJobForEachTaskCluster(t *testing.T) {
-    ctx := context.Background()
-
-    for _, tc := range passCases() {
-        t.Run(tc.name, func(t *testing.T) {
-            b := &bundle.Bundle{
-                Config: config.Root{
-                    Resources: config.Resources{
-                        Jobs: map[string]*resources.Job{
-                            "foo": {
-                                JobSettings: &jobs.JobSettings{
-                                    Tasks: []jobs.Task{
-                                        {
-                                            ForEachTask: &jobs.ForEachTask{
-                                                Task: jobs.Task{
-                                                    NewCluster: &compute.ClusterSpec{
-                                                        ClusterName: "my_cluster",
-                                                        SparkConf:   tc.sparkConf,
-                                                        CustomTags:  tc.customTags,
-                                                        PolicyId:    tc.policyId,
-                                                    },
-                                                },
-                                            },
-                                        },
-                                    },
-                                },
-                            },
-                        },
-                    },
-                },
-            }
-
-            if tc.numWorkers != nil {
-                bundletest.Mutate(t, b, func(v dyn.Value) (dyn.Value, error) {
-                    return dyn.Set(v, "resources.jobs.foo.tasks[0].for_each_task.task.new_cluster.num_workers", dyn.V(*tc.numWorkers))
-                })
-            }
-
-            diags := bundle.ApplyReadOnly(ctx, bundle.ReadOnly(b), SingleNodeCluster())
-            assert.Empty(t, diags)
-        })
-    }
-}