Compare commits


No commits in common. "4373e7b513eaa70c509f7cd8cb6f91d896a42c94" and "894f4aab4ee3ec7eefaf7898a17fddb0917e7bfd" have entirely different histories.

46 changed files with 543 additions and 1304 deletions

View File

@@ -33,7 +33,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.22.7
+          go-version: 1.22.x

       - name: Setup Python
         uses: actions/setup-python@v5
@@ -68,7 +68,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.22.7
+          go-version: 1.22.x
           # No need to download cached dependencies when running gofmt.
           cache: false
@@ -100,7 +100,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.22.7
+          go-version: 1.22.x

       # Github repo: https://github.com/ajv-validator/ajv-cli
       - name: Install ajv-cli

View File

@@ -21,7 +21,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.22.7
+          go-version: 1.22.x

           # The default cache key for this action considers only the `go.sum` file.
           # We include .goreleaser.yaml here to differentiate from the cache used by the push action

View File

@@ -22,7 +22,7 @@ jobs:
       - name: Setup Go
         uses: actions/setup-go@v5
         with:
-          go-version: 1.22.7
+          go-version: 1.22.x

           # The default cache key for this action considers only the `go.sum` file.
           # We include .goreleaser.yaml here to differentiate from the cache used by the push action

View File

@@ -3,7 +3,6 @@ package loader
 import (
     "context"
     "fmt"
-    "slices"
     "sort"
     "strings"
@@ -11,14 +10,26 @@ import (
     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/libs/diag"
     "github.com/databricks/cli/libs/dyn"
+    "golang.org/x/exp/maps"
 )

-func validateFileFormat(configRoot dyn.Value, filePath string) diag.Diagnostics {
-    for _, resourceDescription := range config.SupportedResources() {
-        singularName := resourceDescription.SingularName
-        for _, ext := range []string{fmt.Sprintf(".%s.yml", singularName), fmt.Sprintf(".%s.yaml", singularName)} {
+var resourceTypes = []string{
+    "job",
+    "pipeline",
+    "model",
+    "experiment",
+    "model_serving_endpoint",
+    "registered_model",
+    "quality_monitor",
+    "schema",
+    "cluster",
+}
+
+func validateFileFormat(r *config.Root, filePath string) diag.Diagnostics {
+    for _, typ := range resourceTypes {
+        for _, ext := range []string{fmt.Sprintf(".%s.yml", typ), fmt.Sprintf(".%s.yaml", typ)} {
             if strings.HasSuffix(filePath, ext) {
-                return validateSingleResourceDefined(configRoot, ext, singularName)
+                return validateSingleResourceDefined(r, ext, typ)
             }
         }
     }
@@ -26,7 +37,7 @@ func validateFileFormat(configRoot dyn.Value, filePath string) diag.Diagnostics
     return nil
 }

-func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.Diagnostics {
+func validateSingleResourceDefined(r *config.Root, ext, typ string) diag.Diagnostics {
     type resource struct {
         path  dyn.Path
         value dyn.Value
@@ -35,17 +46,16 @@ func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.D
     }

     resources := []resource{}
-    supportedResources := config.SupportedResources()

     // Gather all resources defined in the resources block.
     _, err := dyn.MapByPattern(
-        configRoot,
+        r.Value(),
         dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
         func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
             // The key for the resource. Eg: "my_job" for jobs.my_job.
             k := p[2].Key()
             // The type of the resource. Eg: "job" for jobs.my_job.
-            typ := supportedResources[p[1].Key()].SingularName
+            typ := strings.TrimSuffix(p[1].Key(), "s")

             resources = append(resources, resource{path: p, value: v, typ: typ, key: k})
             return v, nil
@@ -56,13 +66,13 @@ func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.D
     // Gather all resources defined in a target block.
     _, err = dyn.MapByPattern(
-        configRoot,
+        r.Value(),
         dyn.NewPattern(dyn.Key("targets"), dyn.AnyKey(), dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
         func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
             // The key for the resource. Eg: "my_job" for jobs.my_job.
             k := p[4].Key()
             // The type of the resource. Eg: "job" for jobs.my_job.
-            typ := supportedResources[p[3].Key()].SingularName
+            typ := strings.TrimSuffix(p[3].Key(), "s")

             resources = append(resources, resource{path: p, value: v, typ: typ, key: k})
             return v, nil
@@ -83,26 +93,28 @@ func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.D
         seenKeys[rr.key] = struct{}{}
     }

-    // Format matches. There's at most one resource defined in the file.
+    // Format matches. There's less than or equal to one resource defined in the file.
     // The resource is also of the correct type.
     if typeMatch && len(seenKeys) <= 1 {
         return nil
     }

-    detail := strings.Builder{}
-    detail.WriteString("The following resources are defined or configured in this file:\n")
-    lines := []string{}
+    msg := strings.Builder{}
+    msg.WriteString(fmt.Sprintf("We recommend only defining a single %s in a file with the %s extension.\n", typ, ext))
+
+    // Dedup the list of resources before adding them to the diagnostic message. This
+    // is needed because we do not dedup earlier when gathering the resources and
+    // it's valid to define the same resource in both the resources and targets block.
+    msg.WriteString("The following resources are defined or configured in this file:\n")
+    setOfLines := map[string]struct{}{}
     for _, r := range resources {
-        lines = append(lines, fmt.Sprintf(" - %s (%s)\n", r.key, r.typ))
+        setOfLines[fmt.Sprintf(" - %s (%s)\n", r.key, r.typ)] = struct{}{}
     }

     // Sort the lines to print to make the output deterministic.
-    sort.Strings(lines)
-    // Compact the lines before writing them to the message to remove any duplicate lines.
-    // This is needed because we do not dedup earlier when gathering the resources
-    // and it's valid to define the same resource in both the resources and targets block.
-    lines = slices.Compact(lines)
-    for _, l := range lines {
-        detail.WriteString(l)
+    listOfLines := maps.Keys(setOfLines)
+    sort.Strings(listOfLines)
+    for _, l := range listOfLines {
+        msg.WriteString(l)
     }

     locations := []dyn.Location{}
@@ -121,9 +133,8 @@ func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.D
     return diag.Diagnostics{
         {
-            Severity:  diag.Recommendation,
-            Summary:   fmt.Sprintf("We recommend only defining a single %s in a file with the %s extension.", typ, ext),
-            Detail:    detail.String(),
+            Severity:  diag.Info,
+            Summary:   msg.String(),
             Locations: locations,
             Paths:     paths,
         },
@@ -154,7 +165,7 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
     }

     // Add any diagnostics associated with the file format.
-    diags = append(diags, validateFileFormat(this.Value(), m.relPath)...)
+    diags = append(diags, validateFileFormat(this, m.relPath)...)
     if diags.HasError() {
         return diags
     }
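A minimal, self-contained sketch of the set-based dedup used on the right-hand side above: collecting the rendered lines into a set drops the duplicates that arise when the same resource appears in both the resources and targets blocks, and sorting the keys restores deterministic output. This assumes only the standard library plus golang.org/x/exp/maps; names and sample values are illustrative.

package main

import (
    "fmt"
    "sort"

    "golang.org/x/exp/maps"
)

func main() {
    // The same resource can be gathered twice (top-level and per-target),
    // so collect the rendered lines into a set to dedup them.
    setOfLines := map[string]struct{}{}
    for _, l := range []string{" - foo (job)", " - bar (job)", " - foo (job)"} {
        setOfLines[l] = struct{}{}
    }

    // Map iteration order is nondeterministic; sort the keys for stable output.
    listOfLines := maps.Keys(setOfLines)
    sort.Strings(listOfLines)
    for _, l := range listOfLines {
        fmt.Println(l)
    }
    // Output:
    //  - bar (job)
    //  - foo (job)
}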

View File

@@ -1,13 +1,16 @@
-package loader_test
+package loader

 import (
     "context"
     "path/filepath"
+    "reflect"
+    "strings"
     "testing"

     "github.com/databricks/cli/bundle"
     "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/bundle/config/loader"
+    "github.com/databricks/cli/bundle/config/resources"
+    "github.com/databricks/cli/bundle/internal/bundletest"
     "github.com/databricks/cli/libs/diag"
     "github.com/databricks/cli/libs/dyn"
     "github.com/stretchr/testify/assert"
@@ -24,7 +27,7 @@ func TestProcessInclude(t *testing.T) {
         },
     }

-    m := loader.ProcessInclude(filepath.Join(b.RootPath, "host.yml"), "host.yml")
+    m := ProcessInclude(filepath.Join(b.RootPath, "host.yml"), "host.yml")
     assert.Equal(t, "ProcessInclude(host.yml)", m.Name())

     // Assert the host value prior to applying the mutator
@@ -36,137 +39,338 @@ func TestProcessInclude(t *testing.T) {
     assert.Equal(t, "bar", b.Config.Workspace.Host)
 }

-func TestProcessIncludeFormatPass(t *testing.T) {
-    for _, fileName := range []string{
-        "one_job.job.yml",
-        "one_pipeline.pipeline.yaml",
-        "two_job.yml",
-        "job_and_pipeline.yml",
-    } {
-        t.Run(fileName, func(t *testing.T) {
-            b := &bundle.Bundle{
-                RootPath: "testdata/format_pass",
-                Config: config.Root{
-                    Bundle: config.Bundle{
-                        Name: "format_test",
-                    },
-                },
-            }
-
-            m := loader.ProcessInclude(filepath.Join(b.RootPath, fileName), fileName)
-            diags := bundle.Apply(context.Background(), b, m)
-            assert.Empty(t, diags)
-        })
-    }
-}
-
-func TestProcessIncludeFormatFail(t *testing.T) {
-    for fileName, expectedDiags := range map[string]diag.Diagnostics{
-        "single_job.pipeline.yaml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single pipeline in a file with the .pipeline.yaml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/single_job.pipeline.yaml"), Line: 11, Column: 11},
-                    {File: filepath.FromSlash("testdata/format_fail/single_job.pipeline.yaml"), Line: 4, Column: 7},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("resources.jobs.job1"),
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
-                },
-            },
-        },
-        "job_and_pipeline.job.yml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single job in a file with the .job.yml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/job_and_pipeline.job.yml"), Line: 11, Column: 11},
-                    {File: filepath.FromSlash("testdata/format_fail/job_and_pipeline.job.yml"), Line: 4, Column: 7},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("resources.pipelines.pipeline1"),
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
-                },
-            },
-        },
-        "job_and_pipeline.experiment.yml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single experiment in a file with the .experiment.yml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/job_and_pipeline.experiment.yml"), Line: 11, Column: 11},
-                    {File: filepath.FromSlash("testdata/format_fail/job_and_pipeline.experiment.yml"), Line: 4, Column: 7},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("resources.pipelines.pipeline1"),
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
-                },
-            },
-        },
-        "two_jobs.job.yml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single job in a file with the .job.yml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/two_jobs.job.yml"), Line: 4, Column: 7},
-                    {File: filepath.FromSlash("testdata/format_fail/two_jobs.job.yml"), Line: 7, Column: 7},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("resources.jobs.job1"),
-                    dyn.MustPathFromString("resources.jobs.job2"),
-                },
-            },
-        },
-        "second_job_in_target.job.yml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single job in a file with the .job.yml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/second_job_in_target.job.yml"), Line: 11, Column: 11},
-                    {File: filepath.FromSlash("testdata/format_fail/second_job_in_target.job.yml"), Line: 4, Column: 7},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("resources.jobs.job1"),
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
-                },
-            },
-        },
-        "two_jobs_in_target.job.yml": {
-            {
-                Severity: diag.Recommendation,
-                Summary:  "We recommend only defining a single job in a file with the .job.yml extension.",
-                Detail:   "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
-                Locations: []dyn.Location{
-                    {File: filepath.FromSlash("testdata/format_fail/two_jobs_in_target.job.yml"), Line: 6, Column: 11},
-                    {File: filepath.FromSlash("testdata/format_fail/two_jobs_in_target.job.yml"), Line: 8, Column: 11},
-                },
-                Paths: []dyn.Path{
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
-                    dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
-                },
-            },
-        },
-    } {
-        t.Run(fileName, func(t *testing.T) {
-            b := &bundle.Bundle{
-                RootPath: "testdata/format_fail",
-                Config: config.Root{
-                    Bundle: config.Bundle{
-                        Name: "format_test",
-                    },
-                },
-            }
-
-            m := loader.ProcessInclude(filepath.Join(b.RootPath, fileName), fileName)
-            diags := bundle.Apply(context.Background(), b, m)
-            require.Len(t, diags, 1)
-            assert.Equal(t, expectedDiags, diags)
+func TestProcessIncludeValidatesFileFormat(t *testing.T) {
+    b := &bundle.Bundle{
+        RootPath: "testdata/format",
+        Config: config.Root{
+            Bundle: config.Bundle{
+                Name: "format_test",
+            },
+        },
+    }
+
+    m := ProcessInclude(filepath.Join(b.RootPath, "foo.job.yml"), "foo.job.yml")
+    diags := bundle.Apply(context.Background(), b, m)
+    require.NoError(t, diags.Error())
+
+    // Assert that the diagnostics contain the expected information
+    assert.Len(t, diags, 1)
+    assert.Equal(t, diag.Diagnostics{
+        {
+            Severity: diag.Info,
+            Summary:  "We recommend only defining a single job in a file with the .job.yml extension.\nThe following resources are defined or configured in this file:\n - bar (job)\n - foo (job)\n",
+            Locations: []dyn.Location{
+                {File: filepath.FromSlash("testdata/format/foo.job.yml"), Line: 4, Column: 7},
+                {File: filepath.FromSlash("testdata/format/foo.job.yml"), Line: 7, Column: 7},
+            },
+            Paths: []dyn.Path{
+                dyn.MustPathFromString("resources.jobs.bar"),
+                dyn.MustPathFromString("resources.jobs.foo"),
+            },
+        },
+    }, diags)
+}
+
+func TestResourceNames(t *testing.T) {
+    names := []string{}
+    typ := reflect.TypeOf(config.Resources{})
+    for i := 0; i < typ.NumField(); i++ {
+        field := typ.Field(i)
+        jsonTags := strings.Split(field.Tag.Get("json"), ",")
+        singularName := strings.TrimSuffix(jsonTags[0], "s")
+        names = append(names, singularName)
+    }
+
+    // Assert the contents of the two lists are equal. Please add the singular
+    // name of your resource to the resourceTypes global if you are adding a new
+    // resource.
+    assert.Equal(t, len(resourceTypes), len(names))
+    for _, name := range names {
+        assert.Contains(t, resourceTypes, name)
+    }
+}
+
+func TestValidateFileFormat(t *testing.T) {
+    onlyJob := config.Root{
+        Resources: config.Resources{
+            Jobs: map[string]*resources.Job{
+                "job1": {},
+            },
+        },
+        Targets: map[string]*config.Target{
+            "target1": {
+                Resources: &config.Resources{
+                    Jobs: map[string]*resources.Job{
+                        "job1": {},
+                    },
+                },
+            },
+        },
+    }
+    onlyJobBundle := bundle.Bundle{Config: onlyJob}
+
+    onlyPipeline := config.Root{
+        Resources: config.Resources{
+            Pipelines: map[string]*resources.Pipeline{
+                "pipeline1": {},
+            },
+        },
+    }
+    onlyPipelineBundle := bundle.Bundle{Config: onlyPipeline}
+
+    bothJobAndPipeline := config.Root{
+        Resources: config.Resources{
+            Jobs: map[string]*resources.Job{
+                "job1": {},
+            },
+        },
+        Targets: map[string]*config.Target{
+            "target1": {
+                Resources: &config.Resources{
+                    Pipelines: map[string]*resources.Pipeline{
+                        "pipeline1": {},
+                    },
+                },
+            },
+        },
+    }
+    bothJobAndPipelineBundle := bundle.Bundle{Config: bothJobAndPipeline}
+
+    twoJobs := config.Root{
+        Resources: config.Resources{
+            Jobs: map[string]*resources.Job{
+                "job1": {},
+                "job2": {},
+            },
+        },
+    }
+    twoJobsBundle := bundle.Bundle{Config: twoJobs}
+
+    twoJobsTopLevelAndTarget := config.Root{
+        Resources: config.Resources{
+            Jobs: map[string]*resources.Job{
+                "job1": {},
+            },
+        },
+        Targets: map[string]*config.Target{
+            "target1": {
+                Resources: &config.Resources{
+                    Jobs: map[string]*resources.Job{
+                        "job2": {},
+                    },
+                },
+            },
+        },
+    }
+    twoJobsTopLevelAndTargetBundle := bundle.Bundle{Config: twoJobsTopLevelAndTarget}
+
+    twoJobsInTarget := config.Root{
+        Targets: map[string]*config.Target{
+            "target1": {
+                Resources: &config.Resources{
+                    Jobs: map[string]*resources.Job{
+                        "job1": {},
+                        "job2": {},
+                    },
+                },
+            },
+        },
+    }
+    twoJobsInTargetBundle := bundle.Bundle{Config: twoJobsInTarget}
+
+    tcases := []struct {
+        name      string
+        bundle    *bundle.Bundle
+        expected  diag.Diagnostics
+        fileName  string
+        locations map[string]dyn.Location
+    }{
+        {
+            name:     "single job",
+            bundle:   &onlyJobBundle,
+            expected: nil,
+            fileName: "foo.job.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1": {File: "foo.job.yml", Line: 1, Column: 1},
+            },
+        },
+        {
+            name:     "single pipeline",
+            bundle:   &onlyPipelineBundle,
+            expected: nil,
+            fileName: "foo.pipeline.yml",
+            locations: map[string]dyn.Location{
+                "resources.pipelines.pipeline1": {File: "foo.pipeline.yaml", Line: 1, Column: 1},
+            },
+        },
+        {
+            name:   "single job but extension is pipeline",
+            bundle: &onlyJobBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single pipeline in a file with the .pipeline.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.pipeline.yml", Line: 1, Column: 1},
+                        {File: "foo.pipeline.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString("resources.jobs.job1"),
+                        dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
+                    },
+                },
+            },
+            fileName: "foo.pipeline.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1":                 {File: "foo.pipeline.yml", Line: 1, Column: 1},
+                "targets.target1.resources.jobs.job1": {File: "foo.pipeline.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:     "job and pipeline",
+            bundle:   &bothJobAndPipelineBundle,
+            expected: nil,
+            fileName: "foo.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1":                           {File: "foo.yml", Line: 1, Column: 1},
+                "targets.target1.resources.pipelines.pipeline1": {File: "foo.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:   "job and pipeline but extension is job",
+            bundle: &bothJobAndPipelineBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single job in a file with the .job.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.job.yml", Line: 1, Column: 1},
+                        {File: "foo.job.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString("resources.jobs.job1"),
+                        dyn.MustPathFromString("targets.target1.resources.pipelines.pipeline1"),
+                    },
+                },
+            },
+            fileName: "foo.job.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1":                           {File: "foo.job.yml", Line: 1, Column: 1},
+                "targets.target1.resources.pipelines.pipeline1": {File: "foo.job.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:   "job and pipeline but extension is experiment",
+            bundle: &bothJobAndPipelineBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single experiment in a file with the .experiment.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.experiment.yml", Line: 1, Column: 1},
+                        {File: "foo.experiment.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString("resources.jobs.job1"),
+                        dyn.MustPathFromString("targets.target1.resources.pipelines.pipeline1"),
+                    },
+                },
+            },
+            fileName: "foo.experiment.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1":                           {File: "foo.experiment.yml", Line: 1, Column: 1},
+                "targets.target1.resources.pipelines.pipeline1": {File: "foo.experiment.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:   "two jobs",
+            bundle: &twoJobsBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single job in a file with the .job.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.job.yml", Line: 1, Column: 1},
+                        {File: "foo.job.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString("resources.jobs.job1"),
+                        dyn.MustPathFromString("resources.jobs.job2"),
+                    },
+                },
+            },
+            fileName: "foo.job.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1": {File: "foo.job.yml", Line: 1, Column: 1},
+                "resources.jobs.job2": {File: "foo.job.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:     "two jobs but extension is simple yaml",
+            bundle:   &twoJobsBundle,
+            expected: nil,
+            fileName: "foo.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1": {File: "foo.yml", Line: 1, Column: 1},
+                "resources.jobs.job2": {File: "foo.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:   "two jobs in top level and target",
+            bundle: &twoJobsTopLevelAndTargetBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single job in a file with the .job.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.job.yml", Line: 1, Column: 1},
+                        {File: "foo.job.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString("resources.jobs.job1"),
+                        dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
+                    },
+                },
+            },
+            fileName: "foo.job.yml",
+            locations: map[string]dyn.Location{
+                "resources.jobs.job1":                 {File: "foo.job.yml", Line: 1, Column: 1},
+                "targets.target1.resources.jobs.job2": {File: "foo.job.yml", Line: 2, Column: 2},
+            },
+        },
+        {
+            name:   "two jobs in target",
+            bundle: &twoJobsInTargetBundle,
+            expected: diag.Diagnostics{
+                {
+                    Severity: diag.Info,
+                    Summary:  "We recommend only defining a single job in a file with the .job.yml extension.\nThe following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
+                    Locations: []dyn.Location{
+                        {File: "foo.job.yml", Line: 1, Column: 1},
+                        {File: "foo.job.yml", Line: 2, Column: 2},
+                    },
+                    Paths: []dyn.Path{
+                        dyn.MustPathFromString(("targets.target1.resources.jobs.job1")),
+                        dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
+                    },
+                },
+            },
+            fileName: "foo.job.yml",
+            locations: map[string]dyn.Location{
+                "targets.target1.resources.jobs.job1": {File: "foo.job.yml", Line: 1, Column: 1},
+                "targets.target1.resources.jobs.job2": {File: "foo.job.yml", Line: 2, Column: 2},
+            },
+        },
+    }
+
+    for _, tc := range tcases {
+        t.Run(tc.name, func(t *testing.T) {
+            for k, v := range tc.locations {
+                bundletest.SetLocation(tc.bundle, k, []dyn.Location{v})
+            }
+
+            diags := validateFileFormat(&tc.bundle.Config, tc.fileName)
+            assert.Equal(t, tc.expected, diags)
         })
     }
 }
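TestResourceNames above keeps the hardcoded resourceTypes list honest by deriving each singular name from the json struct tags via reflection. A self-contained sketch of that technique follows; the Resources struct here is a stand-in for the real config type, not the actual definition.

package main

import (
    "fmt"
    "reflect"
    "strings"
)

// Stand-in for config.Resources: the first json tag element is the plural
// resource key used in bundle configuration.
type Resources struct {
    Jobs      map[string]any `json:"jobs"`
    Pipelines map[string]any `json:"pipelines"`
    Schemas   map[string]any `json:"schemas"`
}

func main() {
    typ := reflect.TypeOf(Resources{})
    for i := 0; i < typ.NumField(); i++ {
        jsonTag := strings.Split(typ.Field(i).Tag.Get("json"), ",")[0]
        // Trim the trailing "s" to recover the singular name.
        fmt.Println(strings.TrimSuffix(jsonTag, "s"))
    }
    // Output: job, pipeline, schema (one per line)
}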

View File

@@ -1,11 +0,0 @@
-resources:
-  pipelines:
-    pipeline1:
-      name: pipeline1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          name: job1

View File

@@ -1,11 +0,0 @@
-resources:
-  pipelines:
-    pipeline1:
-      name: pipeline1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          name: job1

View File

@@ -1,11 +0,0 @@
-resources:
-  jobs:
-    job1:
-      name: job1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job2:
-          name: job2

View File

@@ -1,11 +0,0 @@
-resources:
-  jobs:
-    job1:
-      name: job1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          description: job1

View File

@@ -1,7 +0,0 @@
-resources:
-  jobs:
-    job1:
-      name: job1
-
-    job2:
-      name: job2

View File

@@ -1,8 +0,0 @@
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          description: job1
-        job2:
-          description: job2

View File

@@ -1,11 +0,0 @@
-resources:
-  pipelines:
-    pipeline1:
-      name: pipeline1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          name: job1

View File

@@ -1,11 +0,0 @@
-resources:
-  jobs:
-    job1:
-      name: job1
-
-targets:
-  target1:
-    resources:
-      jobs:
-        job1:
-          description: job1

View File

@@ -1,5 +0,0 @@
-# TODO: Remove all the schema inlined references
-resources:
-  pipelines:
-    pipeline1:
-      name: pipeline1

View File

@@ -1,7 +0,0 @@
-resources:
-  jobs:
-    job1:
-      name: job1
-
-    job2:
-      name: job2

View File

@@ -1,115 +0,0 @@
-package paths
-
-import (
-    "github.com/databricks/cli/bundle/libraries"
-    "github.com/databricks/cli/libs/dyn"
-)
-
-type jobRewritePattern struct {
-    pattern     dyn.Pattern
-    kind        PathKind
-    skipRewrite func(string) bool
-}
-
-func noSkipRewrite(string) bool {
-    return false
-}
-
-func jobTaskRewritePatterns(base dyn.Pattern) []jobRewritePattern {
-    return []jobRewritePattern{
-        {
-            base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")),
-            PathKindNotebook,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")),
-            PathKindWorkspaceFile,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")),
-            PathKindDirectory,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")),
-            PathKindWorkspaceFile,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")),
-            PathKindLibrary,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")),
-            PathKindLibrary,
-            noSkipRewrite,
-        },
-        {
-            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("requirements")),
-            PathKindWorkspaceFile,
-            noSkipRewrite,
-        },
-    }
-}
-
-func jobRewritePatterns() []jobRewritePattern {
-    // Base pattern to match all tasks in all jobs.
-    base := dyn.NewPattern(
-        dyn.Key("resources"),
-        dyn.Key("jobs"),
-        dyn.AnyKey(),
-        dyn.Key("tasks"),
-        dyn.AnyIndex(),
-    )
-
-    // Compile list of patterns and their respective rewrite functions.
-    jobEnvironmentsPatterns := []jobRewritePattern{
-        {
-            dyn.NewPattern(
-                dyn.Key("resources"),
-                dyn.Key("jobs"),
-                dyn.AnyKey(),
-                dyn.Key("environments"),
-                dyn.AnyIndex(),
-                dyn.Key("spec"),
-                dyn.Key("dependencies"),
-                dyn.AnyIndex(),
-            ),
-            PathKindWithPrefix,
-            func(s string) bool {
-                return !libraries.IsLibraryLocal(s)
-            },
-        },
-    }
-
-    taskPatterns := jobTaskRewritePatterns(base)
-    forEachPatterns := jobTaskRewritePatterns(base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
-    allPatterns := append(taskPatterns, jobEnvironmentsPatterns...)
-    allPatterns = append(allPatterns, forEachPatterns...)
-    return allPatterns
-}
-
-// VisitJobPaths visits all paths in job resources and applies a function to each path.
-func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) {
-    var err error
-    var newValue = value
-
-    for _, rewritePattern := range jobRewritePatterns() {
-        newValue, err = dyn.MapByPattern(newValue, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
-            if rewritePattern.skipRewrite(v.MustString()) {
-                return v, nil
-            }
-
-            return fn(p, rewritePattern.kind, v)
-        })
-        if err != nil {
-            return dyn.InvalidValue, err
-        }
-    }
-
-    return newValue, nil
-}

View File

@@ -1,168 +0,0 @@
-package paths
-
-import (
-    "testing"
-
-    "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/bundle/config/resources"
-    "github.com/databricks/cli/libs/dyn"
-    assert "github.com/databricks/cli/libs/dyn/dynassert"
-    "github.com/databricks/databricks-sdk-go/service/compute"
-    "github.com/databricks/databricks-sdk-go/service/jobs"
-    "github.com/stretchr/testify/require"
-)
-
-func TestVisitJobPaths(t *testing.T) {
-    task0 := jobs.Task{
-        NotebookTask: &jobs.NotebookTask{
-            NotebookPath: "abc",
-        },
-    }
-    task1 := jobs.Task{
-        SparkPythonTask: &jobs.SparkPythonTask{
-            PythonFile: "abc",
-        },
-    }
-    task2 := jobs.Task{
-        DbtTask: &jobs.DbtTask{
-            ProjectDirectory: "abc",
-        },
-    }
-    task3 := jobs.Task{
-        SqlTask: &jobs.SqlTask{
-            File: &jobs.SqlTaskFile{
-                Path: "abc",
-            },
-        },
-    }
-    task4 := jobs.Task{
-        Libraries: []compute.Library{
-            {Whl: "dist/foo.whl"},
-        },
-    }
-    task5 := jobs.Task{
-        Libraries: []compute.Library{
-            {Jar: "dist/foo.jar"},
-        },
-    }
-    task6 := jobs.Task{
-        Libraries: []compute.Library{
-            {Requirements: "requirements.txt"},
-        },
-    }
-
-    job0 := &resources.Job{
-        JobSettings: &jobs.JobSettings{
-            Tasks: []jobs.Task{
-                task0,
-                task1,
-                task2,
-                task3,
-                task4,
-                task5,
-                task6,
-            },
-        },
-    }
-
-    root := config.Root{
-        Resources: config.Resources{
-            Jobs: map[string]*resources.Job{
-                "job0": job0,
-            },
-        },
-    }
-
-    actual := visitJobPaths(t, root)
-    expected := []dyn.Path{
-        dyn.MustPathFromString("resources.jobs.job0.tasks[0].notebook_task.notebook_path"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[1].spark_python_task.python_file"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[2].dbt_task.project_directory"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[3].sql_task.file.path"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[4].libraries[0].whl"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[5].libraries[0].jar"),
-        dyn.MustPathFromString("resources.jobs.job0.tasks[6].libraries[0].requirements"),
-    }
-
-    assert.ElementsMatch(t, expected, actual)
-}
-
-func TestVisitJobPaths_environments(t *testing.T) {
-    environment0 := jobs.JobEnvironment{
-        Spec: &compute.Environment{
-            Dependencies: []string{
-                "dist_0/*.whl",
-                "dist_1/*.whl",
-            },
-        },
-    }
-
-    job0 := &resources.Job{
-        JobSettings: &jobs.JobSettings{
-            Environments: []jobs.JobEnvironment{
-                environment0,
-            },
-        },
-    }
-
-    root := config.Root{
-        Resources: config.Resources{
-            Jobs: map[string]*resources.Job{
-                "job0": job0,
-            },
-        },
-    }
-
-    actual := visitJobPaths(t, root)
-    expected := []dyn.Path{
-        dyn.MustPathFromString("resources.jobs.job0.environments[0].spec.dependencies[0]"),
-        dyn.MustPathFromString("resources.jobs.job0.environments[0].spec.dependencies[1]"),
-    }
-
-    assert.ElementsMatch(t, expected, actual)
-}
-
-func TestVisitJobPaths_foreach(t *testing.T) {
-    task0 := jobs.Task{
-        ForEachTask: &jobs.ForEachTask{
-            Task: jobs.Task{
-                NotebookTask: &jobs.NotebookTask{
-                    NotebookPath: "abc",
-                },
-            },
-        },
-    }
-
-    job0 := &resources.Job{
-        JobSettings: &jobs.JobSettings{
-            Tasks: []jobs.Task{
-                task0,
-            },
-        },
-    }
-
-    root := config.Root{
-        Resources: config.Resources{
-            Jobs: map[string]*resources.Job{
-                "job0": job0,
-            },
-        },
-    }
-
-    actual := visitJobPaths(t, root)
-    expected := []dyn.Path{
-        dyn.MustPathFromString("resources.jobs.job0.tasks[0].for_each_task.task.notebook_task.notebook_path"),
-    }
-
-    assert.ElementsMatch(t, expected, actual)
-}
-
-func visitJobPaths(t *testing.T, root config.Root) []dyn.Path {
-    var actual []dyn.Path
-    err := root.Mutate(func(value dyn.Value) (dyn.Value, error) {
-        return VisitJobPaths(value, func(p dyn.Path, kind PathKind, v dyn.Value) (dyn.Value, error) {
-            actual = append(actual, p)
-            return v, nil
-        })
-    })
-    require.NoError(t, err)
-    return actual
-}

View File

@@ -1,26 +0,0 @@
-package paths
-
-import "github.com/databricks/cli/libs/dyn"
-
-type PathKind int
-
-const (
-    // PathKindLibrary is a path to a library file
-    PathKindLibrary = iota
-
-    // PathKindNotebook is a path to a notebook file
-    PathKindNotebook
-
-    // PathKindWorkspaceFile is a path to a regular workspace file,
-    // notebooks are not allowed because they are uploaded a special
-    // kind of workspace object.
-    PathKindWorkspaceFile
-
-    // PathKindWithPrefix is a path that starts with './'
-    PathKindWithPrefix
-
-    // PathKindDirectory is a path to directory
-    PathKindDirectory
-)
-
-type VisitFunc func(path dyn.Path, kind PathKind, value dyn.Value) (dyn.Value, error)

View File

@@ -4,11 +4,97 @@ import (
     "fmt"
     "slices"

-    "github.com/databricks/cli/bundle/config/mutator/paths"
+    "github.com/databricks/cli/bundle/libraries"
     "github.com/databricks/cli/libs/dyn"
 )

+type jobRewritePattern struct {
+    pattern     dyn.Pattern
+    fn          rewriteFunc
+    skipRewrite func(string) bool
+}
+
+func noSkipRewrite(string) bool {
+    return false
+}
+
+func rewritePatterns(t *translateContext, base dyn.Pattern) []jobRewritePattern {
+    return []jobRewritePattern{
+        {
+            base.Append(dyn.Key("notebook_task"), dyn.Key("notebook_path")),
+            t.translateNotebookPath,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("spark_python_task"), dyn.Key("python_file")),
+            t.translateFilePath,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("dbt_task"), dyn.Key("project_directory")),
+            t.translateDirectoryPath,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("sql_task"), dyn.Key("file"), dyn.Key("path")),
+            t.translateFilePath,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("whl")),
+            t.translateNoOp,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("jar")),
+            t.translateNoOp,
+            noSkipRewrite,
+        },
+        {
+            base.Append(dyn.Key("libraries"), dyn.AnyIndex(), dyn.Key("requirements")),
+            t.translateFilePath,
+            noSkipRewrite,
+        },
+    }
+}
+
+func (t *translateContext) jobRewritePatterns() []jobRewritePattern {
+    // Base pattern to match all tasks in all jobs.
+    base := dyn.NewPattern(
+        dyn.Key("resources"),
+        dyn.Key("jobs"),
+        dyn.AnyKey(),
+        dyn.Key("tasks"),
+        dyn.AnyIndex(),
+    )
+
+    // Compile list of patterns and their respective rewrite functions.
+    jobEnvironmentsPatterns := []jobRewritePattern{
+        {
+            dyn.NewPattern(
+                dyn.Key("resources"),
+                dyn.Key("jobs"),
+                dyn.AnyKey(),
+                dyn.Key("environments"),
+                dyn.AnyIndex(),
+                dyn.Key("spec"),
+                dyn.Key("dependencies"),
+                dyn.AnyIndex(),
+            ),
+            t.translateNoOpWithPrefix,
+            func(s string) bool {
+                return !libraries.IsLibraryLocal(s)
+            },
+        },
+    }
+
+    taskPatterns := rewritePatterns(t, base)
+    forEachPatterns := rewritePatterns(t, base.Append(dyn.Key("for_each_task"), dyn.Key("task")))
+    allPatterns := append(taskPatterns, jobEnvironmentsPatterns...)
+    allPatterns = append(allPatterns, forEachPatterns...)
+    return allPatterns
+}
+
 func (t *translateContext) applyJobTranslations(v dyn.Value) (dyn.Value, error) {
     var err error
@@ -25,41 +111,30 @@ func (t *translateContext) applyJobTranslations(v dyn.Value) (dyn.Value, error)
         }
     }

-    return paths.VisitJobPaths(v, func(p dyn.Path, kind paths.PathKind, v dyn.Value) (dyn.Value, error) {
-        key := p[2].Key()
-
-        // Skip path translation if the job is using git source.
-        if slices.Contains(ignore, key) {
-            return v, nil
-        }
-
-        dir, err := v.Location().Directory()
-        if err != nil {
-            return dyn.InvalidValue, fmt.Errorf("unable to determine directory for job %s: %w", key, err)
-        }
-
-        rewritePatternFn, err := t.getRewritePatternFn(kind)
-        if err != nil {
-            return dyn.InvalidValue, err
-        }
-
-        return t.rewriteRelativeTo(p, v, rewritePatternFn, dir, fallback[key])
-    })
-}
-
-func (t *translateContext) getRewritePatternFn(kind paths.PathKind) (rewriteFunc, error) {
-    switch kind {
-    case paths.PathKindLibrary:
-        return t.translateNoOp, nil
-    case paths.PathKindNotebook:
-        return t.translateNotebookPath, nil
-    case paths.PathKindWorkspaceFile:
-        return t.translateFilePath, nil
-    case paths.PathKindDirectory:
-        return t.translateDirectoryPath, nil
-    case paths.PathKindWithPrefix:
-        return t.translateNoOpWithPrefix, nil
-    }
-
-    return nil, fmt.Errorf("unsupported path kind: %d", kind)
+    for _, rewritePattern := range t.jobRewritePatterns() {
+        v, err = dyn.MapByPattern(v, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
+            key := p[2].Key()
+
+            // Skip path translation if the job is using git source.
+            if slices.Contains(ignore, key) {
+                return v, nil
+            }
+
+            dir, err := v.Location().Directory()
+            if err != nil {
+                return dyn.InvalidValue, fmt.Errorf("unable to determine directory for job %s: %w", key, err)
+            }
+
+            sv := v.MustString()
+            if rewritePattern.skipRewrite(sv) {
+                return v, nil
+            }
+
+            return t.rewriteRelativeTo(p, v, rewritePattern.fn, dir, fallback[key])
+        })
+        if err != nil {
+            return dyn.InvalidValue, err
+        }
+    }
+
+    return v, nil
 }
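The right-hand side above folds the path-visitor back into a table of rewrite patterns, each pairing a matcher with a rewrite function and a skip predicate. Below is a library-free sketch of that table-driven shape; the matchers and rewrites are illustrative stand-ins for dyn.Pattern and the translate* methods, not the real API.

package main

import (
    "fmt"
    "strings"
)

// Each entry pairs a matcher with a rewrite function and a skip predicate,
// mirroring the jobRewritePattern table above in miniature.
type rewritePattern struct {
    match       func(path string) bool
    rewrite     func(value string) string
    skipRewrite func(value string) bool
}

func main() {
    patterns := []rewritePattern{
        {
            match:       func(p string) bool { return strings.HasSuffix(p, ".notebook_path") },
            rewrite:     func(v string) string { return "/Workspace/project/" + v },
            skipRewrite: func(string) bool { return false },
        },
    }

    values := map[string]string{
        "resources.jobs.j.tasks[0].notebook_task.notebook_path": "notebooks/main",
    }
    for path, v := range values {
        for _, p := range patterns {
            // Apply the first matching pattern unless its skip predicate fires.
            if p.match(path) && !p.skipRewrite(v) {
                values[path] = p.rewrite(v)
            }
        }
    }
    fmt.Println(values["resources.jobs.j.tasks[0].notebook_task.notebook_path"])
    // /Workspace/project/notebooks/main
}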

View File

@@ -59,22 +59,3 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
     return found[0], nil
 }
-
-type ResourceDescription struct {
-    SingularName string
-}
-
-// The keys of the map corresponds to the resource key in the bundle configuration.
-func SupportedResources() map[string]ResourceDescription {
-    return map[string]ResourceDescription{
-        "jobs":                    {SingularName: "job"},
-        "pipelines":               {SingularName: "pipeline"},
-        "models":                  {SingularName: "model"},
-        "experiments":             {SingularName: "experiment"},
-        "model_serving_endpoints": {SingularName: "model_serving_endpoint"},
-        "registered_models":       {SingularName: "registered_model"},
-        "quality_monitors":        {SingularName: "quality_monitor"},
-        "schemas":                 {SingularName: "schema"},
-        "clusters":                {SingularName: "cluster"},
-    }
-}

View File

@@ -3,7 +3,6 @@ package config
 import (
     "encoding/json"
     "reflect"
-    "strings"
     "testing"

     "github.com/stretchr/testify/assert"
@@ -62,18 +61,3 @@ func TestCustomMarshallerIsImplemented(t *testing.T) {
         }, "Resource %s does not have a custom unmarshaller", field.Name)
     }
 }
-
-func TestSupportedResources(t *testing.T) {
-    expected := map[string]ResourceDescription{}
-    typ := reflect.TypeOf(Resources{})
-    for i := 0; i < typ.NumField(); i++ {
-        field := typ.Field(i)
-        jsonTags := strings.Split(field.Tag.Get("json"), ",")
-        singularName := strings.TrimSuffix(jsonTags[0], "s")
-        expected[jsonTags[0]] = ResourceDescription{SingularName: singularName}
-    }
-
-    // Please add your resource to the SupportedResources() function in resources.go
-    // if you are adding a new resource.
-    assert.Equal(t, expected, SupportedResources())
-}

View File

@@ -406,14 +406,7 @@ func (r *Root) MergeTargetOverrides(name string) error {
     return r.updateWithDynamicValue(root)
 }

-var allowedVariableDefinitions = []([]string){
-    {"default", "type", "description"},
-    {"default", "type"},
-    {"default", "description"},
-    {"lookup", "description"},
-    {"default"},
-    {"lookup"},
-}
+var variableKeywords = []string{"default", "lookup"}

 // isFullVariableOverrideDef checks if the given value is a full syntax varaible override.
 // A full syntax variable override is a map with either 1 of 2 keys.
@@ -425,26 +418,26 @@ func isFullVariableOverrideDef(v dyn.Value) bool {
         return false
     }

-    // If the map has more than 3 keys, it is not a full variable override.
-    if mv.Len() > 3 {
+    // If the map has more than 2 keys, it is not a full variable override.
+    if mv.Len() > 2 {
         return false
     }

-    for _, keys := range allowedVariableDefinitions {
-        if len(keys) != mv.Len() {
-            continue
-        }
-
-        // Check if the keys are the same.
-        match := true
-        for _, key := range keys {
-            if _, ok := mv.GetByString(key); !ok {
-                match = false
-                break
-            }
-        }
-
-        if match {
+    // If the map has 2 keys, one of them should be "default" and the other is "type"
+    if mv.Len() == 2 {
+        if _, ok := mv.GetByString("type"); !ok {
+            return false
+        }
+
+        if _, ok := mv.GetByString("default"); !ok {
+            return false
+        }
+
+        return true
+    }
+
+    for _, keyword := range variableKeywords {
+        if _, ok := mv.GetByString(keyword); ok {
             return true
         }
     }
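The new branch logic reduces to a small key-set check. Here is a standalone sketch using a plain map in place of dyn.Value; only the key logic is reproduced, and the function name is illustrative.

package main

import "fmt"

// isFullOverride mirrors the branch structure of isFullVariableOverrideDef:
// at most two keys; a two-key map must be exactly {type, default}; otherwise
// the map must contain "default" or "lookup".
func isFullOverride(m map[string]any) bool {
    if len(m) > 2 {
        return false
    }
    if len(m) == 2 {
        _, hasType := m["type"]
        _, hasDefault := m["default"]
        return hasType && hasDefault
    }
    for _, keyword := range []string{"default", "lookup"} {
        if _, ok := m[keyword]; ok {
            return true
        }
    }
    return false
}

func main() {
    fmt.Println(isFullOverride(map[string]any{"default": "foo"}))                   // true
    fmt.Println(isFullOverride(map[string]any{"type": "string", "default": "foo"})) // true
    fmt.Println(isFullOverride(map[string]any{"type": "string", "lookup": "foo"}))  // false
}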

View File

@@ -6,7 +6,6 @@ import (
     "testing"

     "github.com/databricks/cli/bundle/config/variable"
-    "github.com/databricks/cli/libs/dyn"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
@@ -170,87 +169,3 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) {
     assert.Equal(t, "complex var", root.Variables["complex"].Description)
 }
-
-func TestIsFullVariableOverrideDef(t *testing.T) {
-    testCases := []struct {
-        value    dyn.Value
-        expected bool
-    }{
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type":        dyn.V("string"),
-                "default":     dyn.V("foo"),
-                "description": dyn.V("foo var"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type":        dyn.V("string"),
-                "lookup":      dyn.V("foo"),
-                "description": dyn.V("foo var"),
-            }),
-            expected: false,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type":    dyn.V("string"),
-                "default": dyn.V("foo"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type":   dyn.V("string"),
-                "lookup": dyn.V("foo"),
-            }),
-            expected: false,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "description": dyn.V("string"),
-                "default":     dyn.V("foo"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "description": dyn.V("string"),
-                "lookup":      dyn.V("foo"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "default": dyn.V("foo"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "lookup": dyn.V("foo"),
-            }),
-            expected: true,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type": dyn.V("string"),
-            }),
-            expected: false,
-        },
-        {
-            value: dyn.V(map[string]dyn.Value{
-                "type":        dyn.V("string"),
-                "default":     dyn.V("foo"),
-                "description": dyn.V("foo var"),
-                "lookup":      dyn.V("foo"),
-            }),
-            expected: false,
-        },
-    }
-
-    for i, tc := range testCases {
-        assert.Equal(t, tc.expected, isFullVariableOverrideDef(tc.value), "test case %d", i)
-    }
-}

View File

@@ -1,161 +0,0 @@
-package validate
-
-import (
-    "context"
-    "fmt"
-    "strings"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/libs/diag"
-    "github.com/databricks/cli/libs/dyn"
-    "github.com/databricks/databricks-sdk-go/service/jobs"
-)
-
-// JobTaskClusterSpec validates that job tasks have cluster spec defined
-// if task requires a cluster
-func JobTaskClusterSpec() bundle.ReadOnlyMutator {
-    return &jobTaskClusterSpec{}
-}
-
-type jobTaskClusterSpec struct {
-}
-
-func (v *jobTaskClusterSpec) Name() string {
-    return "validate:job_task_cluster_spec"
-}
-
-func (v *jobTaskClusterSpec) Apply(ctx context.Context, rb bundle.ReadOnlyBundle) diag.Diagnostics {
-    diags := diag.Diagnostics{}
-
-    jobsPath := dyn.NewPath(dyn.Key("resources"), dyn.Key("jobs"))
-
-    for resourceName, job := range rb.Config().Resources.Jobs {
-        resourcePath := jobsPath.Append(dyn.Key(resourceName))
-
-        for taskIndex, task := range job.Tasks {
-            taskPath := resourcePath.Append(dyn.Key("tasks"), dyn.Index(taskIndex))
-
-            diags = diags.Extend(validateJobTask(rb, task, taskPath))
-        }
-    }
-
-    return diags
-}
-
-func validateJobTask(rb bundle.ReadOnlyBundle, task jobs.Task, taskPath dyn.Path) diag.Diagnostics {
-    diags := diag.Diagnostics{}
-
-    var specified []string
-    var unspecified []string
-
-    if task.JobClusterKey != "" {
-        specified = append(specified, "job_cluster_key")
-    } else {
-        unspecified = append(unspecified, "job_cluster_key")
-    }
-
-    if task.EnvironmentKey != "" {
-        specified = append(specified, "environment_key")
-    } else {
-        unspecified = append(unspecified, "environment_key")
-    }
-
-    if task.ExistingClusterId != "" {
-        specified = append(specified, "existing_cluster_id")
-    } else {
-        unspecified = append(unspecified, "existing_cluster_id")
-    }
-
-    if task.NewCluster != nil {
-        specified = append(specified, "new_cluster")
-    } else {
-        unspecified = append(unspecified, "new_cluster")
-    }
-
-    if task.ForEachTask != nil {
-        forEachTaskPath := taskPath.Append(dyn.Key("for_each_task"), dyn.Key("task"))
-
-        diags = diags.Extend(validateJobTask(rb, task.ForEachTask.Task, forEachTaskPath))
-    }
-
-    if isComputeTask(task) && len(specified) == 0 {
-        if task.NotebookTask != nil {
-            // notebook tasks without cluster spec will use notebook environment
-        } else {
-            // path might be not very helpful, adding user-specified task key clarifies the context
-            detail := fmt.Sprintf(
-                "Task %q requires a cluster or an environment to run.\nSpecify one of the following fields: %s.",
-                task.TaskKey,
-                strings.Join(unspecified, ", "),
-            )
-
-            diags = diags.Append(diag.Diagnostic{
-                Severity:  diag.Error,
-                Summary:   "Missing required cluster or environment settings",
-                Detail:    detail,
-                Locations: rb.Config().GetLocations(taskPath.String()),
-                Paths:     []dyn.Path{taskPath},
-            })
-        }
-    }
-
-    return diags
-}
-
-// isComputeTask returns true if the task runs on a cluster or serverless GC
-func isComputeTask(task jobs.Task) bool {
-    if task.NotebookTask != nil {
-        // if warehouse_id is set, it's SQL notebook that doesn't need cluster or serverless GC
-        if task.NotebookTask.WarehouseId != "" {
-            return false
-        } else {
-            // task settings don't require specifying a cluster/serverless GC, but task itself can run on one
-            // we handle that case separately in validateJobTask
-            return true
-        }
-    }
-
-    if task.PythonWheelTask != nil {
-        return true
-    }
-
-    if task.DbtTask != nil {
-        return true
-    }
-
-    if task.SparkJarTask != nil {
-        return true
-    }
-
-    if task.SparkSubmitTask != nil {
-        return true
-    }
-
-    if task.SparkPythonTask != nil {
-        return true
-    }
-
-    if task.SqlTask != nil {
-        return false
-    }
-
-    if task.PipelineTask != nil {
-        // while pipelines use clusters, pipeline tasks don't, they only trigger pipelines
-        return false
-    }
-
-    if task.RunJobTask != nil {
-        return false
-    }
-
-    if task.ConditionTask != nil {
-        return false
-    }
-
-    // for each task doesn't use clusters, underlying task(s) can though
-    if task.ForEachTask != nil {
-        return false
-    }
-
-    return false
-}
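The removed validator boils down to one rule: a task that needs compute must set at least one of four cluster/environment fields. A compressed, dependency-free sketch of that core check follows; the field names match the job task settings above, but the surrounding diagnostics plumbing is omitted and the types are stand-ins.

package main

import (
    "fmt"
    "strings"
)

// Stand-in for the relevant jobs.Task fields.
type task struct {
    JobClusterKey, EnvironmentKey, ExistingClusterId string
    HasNewCluster                                    bool
}

// validate reports an error when none of the four alternative fields is set.
func validate(t task) error {
    var unspecified []string
    if t.JobClusterKey == "" {
        unspecified = append(unspecified, "job_cluster_key")
    }
    if t.EnvironmentKey == "" {
        unspecified = append(unspecified, "environment_key")
    }
    if t.ExistingClusterId == "" {
        unspecified = append(unspecified, "existing_cluster_id")
    }
    if !t.HasNewCluster {
        unspecified = append(unspecified, "new_cluster")
    }
    if len(unspecified) == 4 {
        return fmt.Errorf("specify one of the following fields: %s", strings.Join(unspecified, ", "))
    }
    return nil
}

func main() {
    fmt.Println(validate(task{}))                          // all four missing -> error
    fmt.Println(validate(task{JobClusterKey: "cluster1"})) // <nil>
}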

View File

@@ -1,203 +0,0 @@
-package validate
-
-import (
-    "context"
-    "testing"
-
-    "github.com/databricks/cli/bundle"
-    "github.com/databricks/cli/bundle/config"
-    "github.com/databricks/cli/bundle/config/resources"
-    "github.com/databricks/databricks-sdk-go/service/compute"
-    "github.com/databricks/databricks-sdk-go/service/jobs"
-    "github.com/stretchr/testify/assert"
-)
-
-func TestJobTaskClusterSpec(t *testing.T) {
-    expectedSummary := "Missing required cluster or environment settings"
-
-    type testCase struct {
-        name         string
-        task         jobs.Task
-        errorPath    string
-        errorDetail  string
-        errorSummary string
-    }
-
-    testCases := []testCase{
-        {
-            name: "valid notebook task",
-            task: jobs.Task{
-                // while a cluster is needed, it will use notebook environment to create one
-                NotebookTask: &jobs.NotebookTask{},
-            },
-        },
-        {
-            name: "valid notebook task (job_cluster_key)",
-            task: jobs.Task{
-                JobClusterKey: "cluster1",
-                NotebookTask:  &jobs.NotebookTask{},
-            },
-        },
-        {
-            name: "valid notebook task (new_cluster)",
-            task: jobs.Task{
-                NewCluster:   &compute.ClusterSpec{},
-                NotebookTask: &jobs.NotebookTask{},
-            },
-        },
-        {
-            name: "valid notebook task (existing_cluster_id)",
-            task: jobs.Task{
-                ExistingClusterId: "cluster1",
-                NotebookTask:      &jobs.NotebookTask{},
-            },
-        },
-        {
-            name: "valid SQL notebook task",
-            task: jobs.Task{
-                NotebookTask: &jobs.NotebookTask{
-                    WarehouseId: "warehouse1",
-                },
-            },
-        },
-        {
-            name: "valid python wheel task",
-            task: jobs.Task{
-                JobClusterKey:   "cluster1",
-                PythonWheelTask: &jobs.PythonWheelTask{},
-            },
-        },
-        {
-            name: "valid python wheel task (environment_key)",
-            task: jobs.Task{
-                EnvironmentKey:  "environment1",
-                PythonWheelTask: &jobs.PythonWheelTask{},
-            },
-        },
-        {
-            name: "valid dbt task",
-            task: jobs.Task{
-                JobClusterKey: "cluster1",
-                DbtTask:       &jobs.DbtTask{},
-            },
-        },
-        {
-            name: "valid spark jar task",
-            task: jobs.Task{
-                JobClusterKey: "cluster1",
-                SparkJarTask:  &jobs.SparkJarTask{},
-            },
-        },
-        {
-            name: "valid spark submit",
-            task: jobs.Task{
-                NewCluster:      &compute.ClusterSpec{},
-                SparkSubmitTask: &jobs.SparkSubmitTask{},
-            },
-        },
-        {
-            name: "valid spark python task",
-            task: jobs.Task{
-                JobClusterKey:   "cluster1",
-                SparkPythonTask: &jobs.SparkPythonTask{},
-            },
-        },
-        {
-            name: "valid SQL task",
-            task: jobs.Task{
-                SqlTask: &jobs.SqlTask{},
-            },
-        },
-        {
-            name: "valid pipeline task",
-            task: jobs.Task{
-                PipelineTask: &jobs.PipelineTask{},
-            },
-        },
-        {
-            name: "valid run job task",
-            task: jobs.Task{
-                RunJobTask: &jobs.RunJobTask{},
-            },
-        },
-        {
-            name: "valid condition task",
-            task: jobs.Task{
-                ConditionTask: &jobs.ConditionTask{},
-            },
-        },
-        {
-            name: "valid for each task",
-            task: jobs.Task{
-                ForEachTask: &jobs.ForEachTask{
-                    Task: jobs.Task{
-                        JobClusterKey: "cluster1",
-                        NotebookTask:  &jobs.NotebookTask{},
-                    },
-                },
-            },
-        },
-        {
-            name: "invalid python wheel task",
-            task: jobs.Task{
-                PythonWheelTask: &jobs.PythonWheelTask{},
-                TaskKey:         "my_task",
-            },
-            errorPath: "resources.jobs.job1.tasks[0]",
-            errorDetail: `Task "my_task" requires a cluster or an environment to run.
-Specify one of the following fields: job_cluster_key, environment_key, existing_cluster_id, new_cluster.`,
-            errorSummary: expectedSummary,
-        },
-        {
-            name: "invalid for each task",
-            task: jobs.Task{
-                ForEachTask: &jobs.ForEachTask{
-                    Task: jobs.Task{
-                        PythonWheelTask: &jobs.PythonWheelTask{},
-                        TaskKey:         "my_task",
-                    },
-                },
-            },
-            errorPath: "resources.jobs.job1.tasks[0].for_each_task.task",
-            errorDetail: `Task "my_task" requires a cluster or an environment to run.
-Specify one of the following fields: job_cluster_key, environment_key, existing_cluster_id, new_cluster.`,
-            errorSummary: expectedSummary,
-        },
-    }
-
-    for _, tc := range testCases {
-        t.Run(tc.name, func(t *testing.T) {
-            job := &resources.Job{
-                JobSettings: &jobs.JobSettings{
-                    Tasks: []jobs.Task{tc.task},
-                },
-            }
-
-            b := createBundle(map[string]*resources.Job{"job1": job})
-            diags := bundle.ApplyReadOnly(context.Background(), bundle.ReadOnly(b), JobTaskClusterSpec())
-            if tc.errorPath != "" || tc.errorDetail != "" || tc.errorSummary != "" {
-                assert.Len(t, diags, 1)
-                assert.Len(t, diags[0].Paths, 1)
-
-                diag := diags[0]
-
-                assert.Equal(t, tc.errorPath, diag.Paths[0].String())
-                assert.Equal(t, tc.errorSummary, diag.Summary)
-                assert.Equal(t, tc.errorDetail, diag.Detail)
-            } else {
-                assert.ElementsMatch(t, []string{}, diags)
-            }
-        })
-    }
-}
-
-func createBundle(jobs map[string]*resources.Job) *bundle.Bundle {
-    return &bundle.Bundle{
-        Config: config.Root{
-            Resources: config.Resources{
-                Jobs: jobs,
-            },
-        },
-    }
-}

View File

@@ -34,7 +34,6 @@ func (v *validate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics
         JobClusterKeyDefined(),
         FilesToSync(),
         ValidateSyncPatterns(),
-        JobTaskClusterSpec(),
     ))
 }

View File

@@ -4,7 +4,6 @@ import (
     "context"
     "encoding/json"
     "fmt"
-    "sort"

     "github.com/databricks/cli/bundle/config"
     "github.com/databricks/cli/bundle/config/resources"
@@ -83,10 +82,6 @@ func BundleToTerraform(config *config.Root) *schema.Root {
         conv(src, &dst)

         if src.JobSettings != nil {
-            sort.Slice(src.JobSettings.Tasks, func(i, j int) bool {
-                return src.JobSettings.Tasks[i].TaskKey < src.JobSettings.Tasks[j].TaskKey
-            })
-
             for _, v := range src.Tasks {
                 var t schema.ResourceJobTask
                 conv(v, &t)

View File

@@ -3,7 +3,6 @@ package tfdyn
 import (
     "context"
     "fmt"
-    "sort"

     "github.com/databricks/cli/bundle/internal/tf/schema"
     "github.com/databricks/cli/libs/dyn"
@@ -20,38 +19,8 @@ func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
         log.Debugf(ctx, "job normalization diagnostic: %s", diag.Summary)
     }

-    // Sort the tasks of each job in the bundle by task key. Sorting
-    // the task keys ensures that the diff computed by terraform is correct and avoids
-    // recreates. For more details see the NOTE at
-    // https://registry.terraform.io/providers/databricks/databricks/latest/docs/resources/job#example-usage
-    // and https://github.com/databricks/terraform-provider-databricks/issues/4011
-    // and https://github.com/databricks/cli/pull/1776
-    vout := vin
-    var err error
-    tasks, ok := vin.Get("tasks").AsSequence()
-    if ok {
-        sort.Slice(tasks, func(i, j int) bool {
-            // We sort the tasks by their task key. Tasks without task keys are ordered
-            // before tasks with task keys. We do not error for those tasks
-            // since presence of a task_key is validated for in the Jobs backend.
-            tk1, ok := tasks[i].Get("task_key").AsString()
-            if !ok {
-                return true
-            }
-            tk2, ok := tasks[j].Get("task_key").AsString()
-            if !ok {
-                return false
-            }
-            return tk1 < tk2
-        })
-        vout, err = dyn.Set(vin, "tasks", dyn.V(tasks))
-        if err != nil {
-            return dyn.InvalidValue, err
-        }
-    }
-
     // Modify top-level keys.
-    vout, err = renameKeys(vout, map[string]string{
+    vout, err := renameKeys(vin, map[string]string{
         "tasks":        "task",
         "job_clusters": "job_cluster",
         "parameters":   "parameter",

View File

@@ -42,8 +42,8 @@ func TestConvertJob(t *testing.T) {
             },
             Tasks: []jobs.Task{
                 {
-                    TaskKey:       "task_key_b",
-                    JobClusterKey: "job_cluster_key_b",
+                    TaskKey:       "task_key",
+                    JobClusterKey: "job_cluster_key",
                     Libraries: []compute.Library{
                         {
                             Pypi: &compute.PythonPyPiLibrary{
@@ -55,17 +55,6 @@ func TestConvertJob(t *testing.T) {
                         },
                     },
                 },
-                {
-                    TaskKey:       "task_key_a",
-                    JobClusterKey: "job_cluster_key_a",
-                },
-                {
-                    TaskKey:       "task_key_c",
-                    JobClusterKey: "job_cluster_key_c",
-                },
-                {
-                    Description: "missing task key 😱",
-                },
             },
         },
         Permissions: []resources.Permission{
@@ -111,15 +100,8 @@ func TestConvertJob(t *testing.T) {
             },
             "task": []any{
                 map[string]any{
-                    "description": "missing task key 😱",
-                },
-                map[string]any{
-                    "task_key":        "task_key_a",
-                    "job_cluster_key": "job_cluster_key_a",
-                },
-                map[string]any{
-                    "task_key":        "task_key_b",
-                    "job_cluster_key": "job_cluster_key_b",
+                    "task_key":        "task_key",
+                    "job_cluster_key": "job_cluster_key",
                     "library": []any{
                         map[string]any{
                             "pypi": map[string]any{
@@ -131,10 +113,6 @@ func TestConvertJob(t *testing.T) {
                         },
                     },
                 },
-                map[string]any{
-                    "task_key":        "task_key_c",
-                    "job_cluster_key": "job_cluster_key_c",
-                },
             },
         }, out.Job["my_job"])

View File

@@ -56,7 +56,7 @@ const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }}
 `

-const infoTemplate = `{{ "Recommendation" | blue }}: {{ .Summary }}
+const infoTemplate = `{{ "Info" | blue }}: {{ .Summary }}
 {{- range $index, $element := .Paths }}
   {{ if eq $index 0 }}at {{else}}   {{ end}}{{ $element.String | green }}
 {{- end }}
@@ -108,18 +108,12 @@ func buildTrailer(diags diag.Diagnostics) string {
     if warnings := len(diags.Filter(diag.Warning)); warnings > 0 {
         parts = append(parts, color.YellowString(pluralize(warnings, "warning", "warnings")))
     }
-    if recommendations := len(diags.Filter(diag.Recommendation)); recommendations > 0 {
-        parts = append(parts, color.BlueString(pluralize(recommendations, "recommendation", "recommendations")))
+    if infos := len(diags.Filter(diag.Info)); infos > 0 {
+        parts = append(parts, color.BlueString(pluralize(infos, "info", "infos")))
     }

-    switch {
-    case len(parts) >= 2:
-        first := strings.Join(parts[:len(parts)-1], ", ")
-        last := parts[len(parts)-1]
-        return fmt.Sprintf("Found %s and %s", first, last)
-    case len(parts) == 1:
-        return fmt.Sprintf("Found %s", parts[0])
-    default:
-        // No diagnostics to print.
+    if len(parts) > 0 {
+        return fmt.Sprintf("Found %s", strings.Join(parts, " and "))
+    } else {
         return color.GreenString("Validation OK!")
     }
 }
@@ -153,7 +147,7 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti
 func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
     errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate))
     warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate))
-    recommendationT := template.Must(template.New("info").Funcs(renderFuncMap).Parse(infoTemplate))
+    infoT := template.Must(template.New("info").Funcs(renderFuncMap).Parse(infoTemplate))

     // Print errors and warnings.
     for _, d := range diags {
@@ -163,8 +157,8 @@ func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics)
             t = errorT
         case diag.Warning:
             t = warningT
-        case diag.Recommendation:
-            t = recommendationT
+        case diag.Info:
+            t = infoT
         }

         for i := range d.Locations {
View File

@@ -46,17 +46,17 @@ func TestRenderTextOutput(t *testing.T) {
             "Found 1 error\n",
         },
         {
-            name: "nil bundle and 1 recommendation",
+            name: "nil bundle and 1 info",
             diags: diag.Diagnostics{
                 {
-                    Severity: diag.Recommendation,
-                    Summary:  "recommendation",
+                    Severity: diag.Info,
+                    Summary:  "info",
                 },
             },
             opts: RenderOptions{RenderSummaryTable: true},
-            expected: "Recommendation: recommendation\n" +
+            expected: "Info: info\n" +
                 "\n" +
-                "Found 1 recommendation\n",
+                "Found 1 info\n",
         },
         {
             name: "bundle during 'load' and 1 error",
@@ -97,7 +97,7 @@ func TestRenderTextOutput(t *testing.T) {
                 "Found 2 warnings\n",
         },
         {
-            name:   "bundle during 'load' and 2 errors, 1 warning and 1 recommendation with details",
+            name:   "bundle during 'load' and 2 errors, 1 warning and 1 info with details",
             bundle: loadingBundle,
             diags: diag.Diagnostics{
                 diag.Diagnostic{
@@ -119,8 +119,8 @@ func TestRenderTextOutput(t *testing.T) {
                     Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
                 },
                 diag.Diagnostic{
-                    Severity:  diag.Recommendation,
-                    Summary:   "recommendation (4)",
+                    Severity:  diag.Info,
+                    Summary:   "info (4)",
                     Detail:    "detail (4)",
                     Locations: []dyn.Location{{File: "foo.py", Line: 4, Column: 1}},
                 },
@@ -141,7 +141,7 @@ func TestRenderTextOutput(t *testing.T) {
                 "\n" +
                 "detail (3)\n" +
                 "\n" +
-                "Recommendation: recommendation (4)\n" +
+                "Info: info (4)\n" +
                 "  in foo.py:4:1\n" +
                 "\n" +
                 "detail (4)\n" +
@@ -149,73 +149,7 @@ func TestRenderTextOutput(t *testing.T) {
                 "Name: test-bundle\n" +
                 "Target: test-target\n" +
                 "\n" +
-                "Found 2 errors, 1 warning and 1 recommendation\n",
+                "Found 2 errors and 1 warning and 1 info\n",
-        },
-        {
-            name:   "bundle during 'load' and 1 errors, 2 warning and 2 recommendations with details",
-            bundle: loadingBundle,
-            diags: diag.Diagnostics{
-                diag.Diagnostic{
-                    Severity:  diag.Error,
-                    Summary:   "error (1)",
-                    Detail:    "detail (1)",
-                    Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}},
-                },
-                diag.Diagnostic{
-                    Severity:  diag.Warning,
-                    Summary:   "warning (2)",
-                    Detail:    "detail (2)",
-                    Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}},
-                },
-                diag.Diagnostic{
-                    Severity:  diag.Warning,
-                    Summary:   "warning (3)",
-                    Detail:    "detail (3)",
-                    Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
-                },
-                diag.Diagnostic{
-                    Severity:  diag.Recommendation,
-                    Summary:   "recommendation (4)",
-                    Detail:    "detail (4)",
-                    Locations: []dyn.Location{{File: "foo.py", Line: 4, Column: 1}},
-                },
-                diag.Diagnostic{
-                    Severity:  diag.Recommendation,
-                    Summary:   "recommendation (5)",
-                    Detail:    "detail (5)",
-                    Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}},
-                },
-            },
-            opts: RenderOptions{RenderSummaryTable: true},
-            expected: "Error: error (1)\n" +
-                "  in foo.py:1:1\n" +
-                "\n" +
-                "detail (1)\n" +
-                "\n" +
-                "Warning: warning (2)\n" +
-                "  in foo.py:2:1\n" +
-                "\n" +
-                "detail (2)\n" +
-                "\n" +
-                "Warning: warning (3)\n" +
-                "  in foo.py:3:1\n" +
-                "\n" +
-                "detail (3)\n" +
-                "\n" +
-                "Recommendation: recommendation (4)\n" +
-                "  in foo.py:4:1\n" +
-                "\n" +
-                "detail (4)\n" +
-                "\n" +
-                "Recommendation: recommendation (5)\n" +
-                "  in foo.py:5:1\n" +
-                "\n" +
-                "detail (5)\n" +
-                "\n" +
-                "Name: test-bundle\n" +
-                "Target: test-target\n" +
-                "\n" +
-                "Found 1 error, 2 warnings and 2 recommendations\n",
}, },
{ {
name: "bundle during 'init'", name: "bundle during 'init'",
@ -248,7 +182,7 @@ func TestRenderTextOutput(t *testing.T) {
"Validation OK!\n", "Validation OK!\n",
}, },
{ {
name: "nil bundle without summary with 1 error, 1 warning and 1 recommendation", name: "nil bundle without summary with 1 error, 1 warning and 1 info",
bundle: nil, bundle: nil,
diags: diag.Diagnostics{ diags: diag.Diagnostics{
diag.Diagnostic{ diag.Diagnostic{
@ -264,8 +198,8 @@ func TestRenderTextOutput(t *testing.T) {
Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
}, },
diag.Diagnostic{ diag.Diagnostic{
Severity: diag.Recommendation, Severity: diag.Info,
Summary: "recommendation (3)", Summary: "info (3)",
Detail: "detail (3)", Detail: "detail (3)",
Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}}, Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}},
}, },
@ -281,7 +215,7 @@ func TestRenderTextOutput(t *testing.T) {
"\n" + "\n" +
"detail (2)\n" + "detail (2)\n" +
"\n" + "\n" +
"Recommendation: recommendation (3)\n" + "Info: info (3)\n" +
" in foo.py:5:1\n" + " in foo.py:5:1\n" +
"\n" + "\n" +
"detail (3)\n" + "detail (3)\n" +
@ -406,10 +340,10 @@ func TestRenderDiagnostics(t *testing.T) {
"'name' is required\n\n", "'name' is required\n\n",
}, },
{ {
name: "recommendation with multiple paths and locations", name: "info with multiple paths and locations",
diags: diag.Diagnostics{ diags: diag.Diagnostics{
{ {
Severity: diag.Recommendation, Severity: diag.Info,
Summary: "summary", Summary: "summary",
Detail: "detail", Detail: "detail",
Paths: []dyn.Path{ Paths: []dyn.Path{
@ -422,7 +356,7 @@ func TestRenderDiagnostics(t *testing.T) {
}, },
}, },
}, },
expected: "Recommendation: summary\n" + expected: "Info: summary\n" +
" at resources.jobs.xxx\n" + " at resources.jobs.xxx\n" +
" resources.jobs.yyy\n" + " resources.jobs.yyy\n" +
" in foo.yaml:1:2\n" + " in foo.yaml:1:2\n" +

go.mod
View File

@ -1,8 +1,6 @@
module github.com/databricks/cli module github.com/databricks/cli
go 1.22.0 go 1.22
toolchain go1.22.7
require ( require (
github.com/Masterminds/semver/v3 v3.3.0 // MIT github.com/Masterminds/semver/v3 v3.3.0 // MIT
@ -12,7 +10,7 @@ require (
github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
github.com/google/uuid v1.6.0 // BSD-3-Clause github.com/google/uuid v1.6.0 // BSD-3-Clause
github.com/hashicorp/go-version v1.7.0 // MPL 2.0 github.com/hashicorp/go-version v1.7.0 // MPL 2.0
github.com/hashicorp/hc-install v0.9.0 // MPL 2.0 github.com/hashicorp/hc-install v0.7.0 // MPL 2.0
github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0 github.com/hashicorp/terraform-exec v0.21.0 // MPL 2.0
github.com/hashicorp/terraform-json v0.22.1 // MPL 2.0 github.com/hashicorp/terraform-json v0.22.1 // MPL 2.0
github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause github.com/manifoldco/promptui v0.9.0 // BSD-3-Clause
@ -24,7 +22,7 @@ require (
github.com/spf13/pflag v1.0.5 // BSD-3-Clause github.com/spf13/pflag v1.0.5 // BSD-3-Clause
github.com/stretchr/testify v1.9.0 // MIT github.com/stretchr/testify v1.9.0 // MIT
golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225
golang.org/x/mod v0.21.0 golang.org/x/mod v0.20.0
golang.org/x/oauth2 v0.23.0 golang.org/x/oauth2 v0.23.0
golang.org/x/sync v0.8.0 golang.org/x/sync v0.8.0
golang.org/x/term v0.24.0 golang.org/x/term v0.24.0
@ -51,7 +49,6 @@ require (
github.com/google/s2a-go v0.1.7 // indirect github.com/google/s2a-go v0.1.7 // indirect
github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect github.com/googleapis/enterprise-certificate-proxy v0.3.2 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-colorable v0.1.13 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect

go.sum generated
View File

@ -99,14 +99,10 @@ github.com/googleapis/gax-go/v2 v2.12.4 h1:9gWcmF85Wvq4ryPFvGFaOgPIs1AQX0d0bcbGw
github.com/googleapis/gax-go/v2 v2.12.4/go.mod h1:KYEYLorsnIGDi/rPC8b5TdlB9kbKoFubselGIoBMCwI= github.com/googleapis/gax-go/v2 v2.12.4/go.mod h1:KYEYLorsnIGDi/rPC8b5TdlB9kbKoFubselGIoBMCwI=
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k=
github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU=
github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk=
github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY=
github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/hc-install v0.9.0 h1:2dIk8LcvANwtv3QZLckxcjyF5w8KVtiMxu6G6eLhghE= github.com/hashicorp/hc-install v0.7.0 h1:Uu9edVqjKQxxuD28mR5TikkKDd/p55S8vzPC1659aBk=
github.com/hashicorp/hc-install v0.9.0/go.mod h1:+6vOP+mf3tuGgMApVYtmsnDoKWMDcFXeTxCACYZ8SFg= github.com/hashicorp/hc-install v0.7.0/go.mod h1:ELmmzZlGnEcqoUMKUuykHaPCIR1sYLYX+KSggWSKZuA=
github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ= github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ=
github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg= github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg=
github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec= github.com/hashicorp/terraform-json v0.22.1 h1:xft84GZR0QzjPVWs4lRUwvTcPnegqlyS7orfb5Ltvec=
@ -184,8 +180,8 @@ golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= golang.org/x/mod v0.20.0 h1:utOm6MM3R3dnawAiJgn0y+xvuYRsm1RKM/4giyfDgV0=
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/mod v0.20.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=

View File

@ -6,5 +6,4 @@ const (
Error Severity = iota Error Severity = iota
Warning Warning
Info Info
Recommendation
) )

View File

@ -209,26 +209,7 @@ func TestRepositoryGitConfigWhenNotARepo(t *testing.T) {
} }
func TestRepositoryOriginUrlRemovesUserCreds(t *testing.T) { func TestRepositoryOriginUrlRemovesUserCreds(t *testing.T) {
tcases := []struct { repo := newTestRepository(t)
url string repo.addOriginUrl("https://username:token@github.com/databricks/foobar.git")
expected string repo.assertOriginUrl("https://github.com/databricks/foobar.git")
}{
{
url: "https://username:token@github.com/databricks/foobar.git",
expected: "https://github.com/databricks/foobar.git",
},
{
// Note: The token is still considered and parsed as a username here.
// However credentials integrations by Git providers like GitHub
// allow for setting a PAT token as a username.
url: "https://token@github.com/databricks/foobar.git",
expected: "https://github.com/databricks/foobar.git",
},
}
for _, tc := range tcases {
repo := newTestRepository(t)
repo.addOriginUrl(tc.url)
repo.assertOriginUrl(tc.expected)
}
} }
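
The single-case version on the right asserts the same behavior the table on the left spells out: user credentials embedded in a remote URL are dropped, including a bare PAT that Git providers accept in the username position. A minimal sketch of that stripping with the standard net/url package (stripUserCreds is a hypothetical name, not the CLI's implementation):

```go
package main

import (
	"fmt"
	"net/url"
)

// stripUserCreds removes any userinfo ("user:pass@" or a bare token
// used as the username) from a remote URL.
func stripUserCreds(raw string) (string, error) {
	u, err := url.Parse(raw)
	if err != nil {
		return "", err
	}
	u.User = nil // drop the userinfo component entirely
	return u.String(), nil
}

func main() {
	for _, raw := range []string{
		"https://username:token@github.com/databricks/foobar.git",
		"https://token@github.com/databricks/foobar.git",
	} {
		clean, err := stripUserCreds(raw)
		if err != nil {
			panic(err)
		}
		fmt.Println(clean) // https://github.com/databricks/foobar.git
	}
}
```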

View File

@ -121,7 +121,7 @@ You can find that job by opening your workspace and clicking on **Workflows**.
You can also deploy to your production target directly from the command-line. You can also deploy to your production target directly from the command-line.
The warehouse, catalog, and schema for that target are configured in databricks.yml. The warehouse, catalog, and schema for that target are configured in databricks.yml.
When deploying to this target, note that the default job at resources/{{.project_name}}.job.yml When deploying to this target, note that the default job at resources/{{.project_name}}_job.yml
has a schedule set that runs every day. The schedule is paused when deploying in development mode has a schedule set that runs every day. The schedule is paused when deploying in development mode
(see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html). (see https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).

View File

@ -18,7 +18,7 @@ This file contains only template directives; it is skipped for the actual output.
{{if $notDLT}} {{if $notDLT}}
{{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}} {{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}}
{{skip "{{.project_name}}/resources/{{.project_name}}.pipeline.yml"}} {{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}}
{{end}} {{end}}
{{if $notNotebook}} {{if $notNotebook}}
@ -26,7 +26,7 @@ This file only template directives; it is skipped for the actual output.
{{end}} {{end}}
{{if (and $notDLT $notNotebook $notPython)}} {{if (and $notDLT $notNotebook $notPython)}}
{{skip "{{.project_name}}/resources/{{.project_name}}.job.yml"}} {{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}}
{{else}} {{else}}
{{skip "{{.project_name}}/resources/.gitkeep"}} {{skip "{{.project_name}}/resources/.gitkeep"}}
{{end}} {{end}}
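
The helper template above decides which generated files to drop by calling a skip directive with the would-be output path. How the real templater implements skip is not shown in this diff; the sketch below only illustrates how such a directive can be registered with Go's text/template via a FuncMap, recording skipped paths in a map (all names and the recording behavior are assumptions for illustration):

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	skipped := map[string]bool{}
	funcs := template.FuncMap{
		// skip records the path and renders nothing, so the helper
		// file itself produces no output.
		"skip": func(path string) string {
			skipped[path] = true
			return ""
		},
	}

	src := `{{if .NotDLT}}{{skip "project/resources/project.pipeline.yml"}}{{end}}`
	t := template.Must(template.New("helpers").Funcs(funcs).Parse(src))
	if err := t.Execute(os.Stdout, map[string]bool{"NotDLT": true}); err != nil {
		panic(err)
	}

	for path := range skipped {
		os.Stdout.WriteString("skipped: " + path + "\n")
	}
}
```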

View File

@ -29,7 +29,7 @@ The '{{.project_name}}' project was generated by using the default-python templa
``` ```
Note that the default job from the template has a schedule that runs every day Note that the default job from the template has a schedule that runs every day
(defined in resources/{{.project_name}}.job.yml). The schedule (defined in resources/{{.project_name}}_job.yml). The schedule
is paused when deploying in development mode (see is paused when deploying in development mode (see
https://docs.databricks.com/dev-tools/bundles/deployment-modes.html). https://docs.databricks.com/dev-tools/bundles/deployment-modes.html).

View File

@ -40,7 +40,7 @@ resources:
- task_key: notebook_task - task_key: notebook_task
{{- end}} {{- end}}
pipeline_task: pipeline_task:
{{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}.pipeline.yml */}} {{- /* TODO: we should find a way that doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}}
pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id} pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id}
{{end -}} {{end -}}
{{- if (eq .include_python "yes") }} {{- if (eq .include_python "yes") }}

View File

@ -14,7 +14,7 @@
"source": [ "source": [
"# DLT pipeline\n", "# DLT pipeline\n",
"\n", "\n",
"This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}.pipeline.yml." "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml."
] ]
}, },
{ {

View File

@ -14,7 +14,7 @@
"source": [ "source": [
"# Default notebook\n", "# Default notebook\n",
"\n", "\n",
"This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}.job.yml." "This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml."
] ]
}, },
{ {

View File

@ -1,4 +1,4 @@
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml) -- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
USE CATALOG {{"{{"}}catalog{{"}}"}}; USE CATALOG {{"{{"}}catalog{{"}}"}};
USE IDENTIFIER({{"{{"}}schema{{"}}"}}); USE IDENTIFIER({{"{{"}}schema{{"}}"}});

View File

@ -1,4 +1,4 @@
-- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql.job.yml) -- This query is executed using Databricks Workflows (see resources/{{.project_name}}_sql_job.yml)
-- --
-- The streaming table below ingests all JSON files in /databricks-datasets/retail-org/sales_orders/ -- The streaming table below ingests all JSON files in /databricks-datasets/retail-org/sales_orders/
-- See also https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-create-streaming-table.html -- See also https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-create-streaming-table.html
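
Both SQL files above rely on the same escaping trick: Go's text/template treats {{ and }} as action delimiters, so literal braces that must survive into the rendered query (placeholders like {{catalog}} intended for a later substitution step) have to be emitted as quoted strings. A minimal demonstration:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	// {{"{{"}} is an action whose result is the literal string "{{";
	// likewise {{"}}"}} yields "}}".
	src := `USE CATALOG {{"{{"}}catalog{{"}}"}};` + "\n"
	t := template.Must(template.New("sql").Parse(src))
	if err := t.Execute(os.Stdout, nil); err != nil {
		panic(err)
	}
	// Prints: USE CATALOG {{catalog}};
}
```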