diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ffd6f58dd..303c78553 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -6f6b1371e640f2dfeba72d365ac566368656f6b6 \ No newline at end of file +0c86ea6dbd9a730c24ff0d4e509603e476955ac5 \ No newline at end of file diff --git a/bundle/config/loader/entry_point_test.go b/bundle/config/loader/entry_point_test.go index 406b9b67c..0723c056c 100644 --- a/bundle/config/loader/entry_point_test.go +++ b/bundle/config/loader/entry_point_test.go @@ -18,7 +18,7 @@ func TestEntryPointNoRootPath(t *testing.T) { func TestEntryPoint(t *testing.T) { b := &bundle.Bundle{ - BundleRootPath: "testdata", + BundleRootPath: "testdata/basic", } diags := bundle.Apply(context.Background(), b, loader.EntryPoint()) require.NoError(t, diags.Error()) diff --git a/bundle/config/loader/process_include.go b/bundle/config/loader/process_include.go index 7cf9a17d7..f82f5db1e 100644 --- a/bundle/config/loader/process_include.go +++ b/bundle/config/loader/process_include.go @@ -3,12 +3,135 @@ package loader import ( "context" "fmt" + "slices" + "sort" + "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" ) +func validateFileFormat(configRoot dyn.Value, filePath string) diag.Diagnostics { + for _, resourceDescription := range config.SupportedResources() { + singularName := resourceDescription.SingularName + + for _, yamlExt := range []string{"yml", "yaml"} { + ext := fmt.Sprintf(".%s.%s", singularName, yamlExt) + if strings.HasSuffix(filePath, ext) { + return validateSingleResourceDefined(configRoot, ext, singularName) + } + } + } + + return nil +} + +func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.Diagnostics { + type resource struct { + path dyn.Path + value dyn.Value + typ string + key string + } + + resources := []resource{} + supportedResources := config.SupportedResources() + + // Gather all resources defined in the resources block. + _, err := dyn.MapByPattern( + configRoot, + dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()), + func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + // The key for the resource, e.g. "my_job" for jobs.my_job. + k := p[2].Key() + // The type of the resource, e.g. "job" for jobs.my_job. + typ := supportedResources[p[1].Key()].SingularName + + resources = append(resources, resource{path: p, value: v, typ: typ, key: k}) + return v, nil + }) + if err != nil { + return diag.FromErr(err) + } + + // Gather all resources defined in a target block. + _, err = dyn.MapByPattern( + configRoot, + dyn.NewPattern(dyn.Key("targets"), dyn.AnyKey(), dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()), + func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + // The key for the resource, e.g. "my_job" for jobs.my_job. + k := p[4].Key() + // The type of the resource, e.g. "job" for jobs.my_job. + typ := supportedResources[p[3].Key()].SingularName + + resources = append(resources, resource{path: p, value: v, typ: typ, key: k}) + return v, nil + }) + if err != nil { + return diag.FromErr(err) + } + + typeMatch := true + seenKeys := map[string]struct{}{} + for _, rr := range resources { + // case: The resource is not of the correct type. + if rr.typ != typ { + typeMatch = false + break + } + + seenKeys[rr.key] = struct{}{} + } + + // Format matches. There's at most one resource defined in the file. + // The resource is also of the correct type. 
+ if typeMatch && len(seenKeys) <= 1 { + return nil + } + + detail := strings.Builder{} + detail.WriteString("The following resources are defined or configured in this file:\n") + lines := []string{} + for _, r := range resources { + lines = append(lines, fmt.Sprintf(" - %s (%s)\n", r.key, r.typ)) + } + // Sort the lines to print to make the output deterministic. + sort.Strings(lines) + // Compact the lines before writing them to the message to remove any duplicate lines. + // This is needed because we do not dedup earlier when gathering the resources + // and it's valid to define the same resource in both the resources and targets block. + lines = slices.Compact(lines) + for _, l := range lines { + detail.WriteString(l) + } + + locations := []dyn.Location{} + paths := []dyn.Path{} + for _, rr := range resources { + locations = append(locations, rr.value.Locations()...) + paths = append(paths, rr.path) + } + // Sort the locations and paths to make the output deterministic. + sort.Slice(locations, func(i, j int) bool { + return locations[i].String() < locations[j].String() + }) + sort.Slice(paths, func(i, j int) bool { + return paths[i].String() < paths[j].String() + }) + + return diag.Diagnostics{ + { + Severity: diag.Recommendation, + Summary: fmt.Sprintf("define a single %s in a file with the %s extension.", strings.ReplaceAll(typ, "_", " "), ext), + Detail: detail.String(), + Locations: locations, + Paths: paths, + }, + } +} + type processInclude struct { fullPath string relPath string @@ -31,6 +154,13 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos if diags.HasError() { return diags } + + // Add any diagnostics associated with the file format. + diags = append(diags, validateFileFormat(this.Value(), m.relPath)...) 
+ if diags.HasError() { + return diags + } + err := b.Config.Merge(this) if err != nil { diags = diags.Extend(diag.FromErr(err)) diff --git a/bundle/config/loader/process_include_test.go b/bundle/config/loader/process_include_test.go index 2ccd84b31..66c695e17 100644 --- a/bundle/config/loader/process_include_test.go +++ b/bundle/config/loader/process_include_test.go @@ -8,13 +8,15 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/loader" + "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestProcessInclude(t *testing.T) { b := &bundle.Bundle{ - BundleRootPath: "testdata", + BundleRootPath: "testdata/basic", Config: config.Root{ Workspace: config.Workspace{ Host: "foo", @@ -33,3 +35,184 @@ func TestProcessInclude(t *testing.T) { require.NoError(t, diags.Error()) assert.Equal(t, "bar", b.Config.Workspace.Host) } + +func TestProcessIncludeFormatMatch(t *testing.T) { + for _, fileName := range []string{ + "one_job.job.yml", + "one_pipeline.pipeline.yaml", + "two_job.yml", + "job_and_pipeline.yml", + "multiple_resources.yml", + } { + t.Run(fileName, func(t *testing.T) { + b := &bundle.Bundle{ + BundleRootPath: "testdata/format_match", + Config: config.Root{ + Bundle: config.Bundle{ + Name: "format_test", + }, + }, + } + + m := loader.ProcessInclude(filepath.Join(b.BundleRootPath, fileName), fileName) + diags := bundle.Apply(context.Background(), b, m) + assert.Empty(t, diags) + }) + } +} + +func TestProcessIncludeFormatNotMatch(t *testing.T) { + for fileName, expectedDiags := range map[string]diag.Diagnostics{ + "single_job.pipeline.yaml": { + { + Severity: diag.Recommendation, + Summary: "define a single pipeline in a file with the .pipeline.yaml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/single_job.pipeline.yaml"), Line: 11, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/single_job.pipeline.yaml"), Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.jobs.job1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job1"), + }, + }, + }, + "job_and_pipeline.job.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single job in a file with the .job.yml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.job.yml"), Line: 11, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.job.yml"), Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.pipelines.pipeline1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job1"), + }, + }, + }, + "job_and_pipeline.experiment.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single experiment in a file with the .experiment.yml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.experiment.yml"), Line: 11, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.experiment.yml"), Line: 4, 
Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.pipelines.pipeline1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job1"), + }, + }, + }, + "two_jobs.job.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single job in a file with the .job.yml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/two_jobs.job.yml"), Line: 4, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/two_jobs.job.yml"), Line: 7, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.jobs.job1"), + dyn.MustPathFromString("resources.jobs.job2"), + }, + }, + }, + "second_job_in_target.job.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single job in a file with the .job.yml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/second_job_in_target.job.yml"), Line: 11, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/second_job_in_target.job.yml"), Line: 4, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.jobs.job1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job2"), + }, + }, + }, + "two_jobs_in_target.job.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single job in a file with the .job.yml extension.", + Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n", + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/two_jobs_in_target.job.yml"), Line: 6, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/two_jobs_in_target.job.yml"), Line: 8, Column: 11}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("targets.target1.resources.jobs.job1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job2"), + }, + }, + }, + "multiple_resources.model_serving_endpoint.yml": { + { + Severity: diag.Recommendation, + Summary: "define a single model serving endpoint in a file with the .model_serving_endpoint.yml extension.", + Detail: `The following resources are defined or configured in this file: + - experiment1 (experiment) + - job1 (job) + - job2 (job) + - job3 (job) + - model1 (model) + - model_serving_endpoint1 (model_serving_endpoint) + - pipeline1 (pipeline) + - pipeline2 (pipeline) + - quality_monitor1 (quality_monitor) + - registered_model1 (registered_model) + - schema1 (schema) +`, + Locations: []dyn.Location{ + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 12, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 14, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 18, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 22, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 24, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 28, Column: 7}, + {File: 
filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 35, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 39, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 43, Column: 11}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 4, Column: 7}, + {File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 8, Column: 7}, + }, + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.experiments.experiment1"), + dyn.MustPathFromString("resources.jobs.job1"), + dyn.MustPathFromString("resources.jobs.job2"), + dyn.MustPathFromString("resources.model_serving_endpoints.model_serving_endpoint1"), + dyn.MustPathFromString("resources.models.model1"), + dyn.MustPathFromString("resources.pipelines.pipeline1"), + dyn.MustPathFromString("resources.pipelines.pipeline2"), + dyn.MustPathFromString("resources.schemas.schema1"), + dyn.MustPathFromString("targets.target1.resources.jobs.job3"), + dyn.MustPathFromString("targets.target1.resources.quality_monitors.quality_monitor1"), + dyn.MustPathFromString("targets.target1.resources.registered_models.registered_model1"), + }, + }, + }, + } { + t.Run(fileName, func(t *testing.T) { + b := &bundle.Bundle{ + BundleRootPath: "testdata/format_not_match", + Config: config.Root{ + Bundle: config.Bundle{ + Name: "format_test", + }, + }, + } + + m := loader.ProcessInclude(filepath.Join(b.BundleRootPath, fileName), fileName) + diags := bundle.Apply(context.Background(), b, m) + require.Len(t, diags, 1) + assert.Equal(t, expectedDiags, diags) + }) + } +} diff --git a/bundle/config/loader/testdata/databricks.yml b/bundle/config/loader/testdata/basic/databricks.yml similarity index 100% rename from bundle/config/loader/testdata/databricks.yml rename to bundle/config/loader/testdata/basic/databricks.yml diff --git a/bundle/config/loader/testdata/host.yml b/bundle/config/loader/testdata/basic/host.yml similarity index 100% rename from bundle/config/loader/testdata/host.yml rename to bundle/config/loader/testdata/basic/host.yml diff --git a/bundle/config/loader/testdata/format_match/job_and_pipeline.yml b/bundle/config/loader/testdata/format_match/job_and_pipeline.yml new file mode 100644 index 000000000..0867fcae4 --- /dev/null +++ b/bundle/config/loader/testdata/format_match/job_and_pipeline.yml @@ -0,0 +1,11 @@ +resources: + pipelines: + pipeline1: + name: pipeline1 + +targets: + target1: + resources: + jobs: + job1: + name: job1 diff --git a/bundle/config/loader/testdata/format_match/multiple_resources.yml b/bundle/config/loader/testdata/format_match/multiple_resources.yml new file mode 100644 index 000000000..dc8e837c6 --- /dev/null +++ b/bundle/config/loader/testdata/format_match/multiple_resources.yml @@ -0,0 +1,43 @@ +resources: + experiments: + experiment1: + name: experiment1 + + model_serving_endpoints: + model_serving_endpoint1: + name: model_serving_endpoint1 + + jobs: + job1: + name: job1 + job2: + name: job2 + + models: + model1: + name: model1 + + pipelines: + pipeline1: + name: pipeline1 + pipeline2: + name: pipeline2 + + schemas: + schema1: + name: schema1 + +targets: + target1: + resources: + quality_monitors: + quality_monitor1: + baseline_table_name: quality_monitor1 + + jobs: + job3: + name: job3 + + registered_models: + registered_model1: + name: 
registered_model1 diff --git a/bundle/config/loader/testdata/format_match/one_job.job.yml b/bundle/config/loader/testdata/format_match/one_job.job.yml new file mode 100644 index 000000000..91af87cdc --- /dev/null +++ b/bundle/config/loader/testdata/format_match/one_job.job.yml @@ -0,0 +1,11 @@ +resources: + jobs: + job1: + name: job1 + +targets: + target1: + resources: + jobs: + job1: + description: job1 diff --git a/bundle/config/loader/testdata/format_match/one_pipeline.pipeline.yaml b/bundle/config/loader/testdata/format_match/one_pipeline.pipeline.yaml new file mode 100644 index 000000000..85cb0d7fc --- /dev/null +++ b/bundle/config/loader/testdata/format_match/one_pipeline.pipeline.yaml @@ -0,0 +1,4 @@ +resources: + pipelines: + pipeline1: + name: pipeline1 diff --git a/bundle/config/loader/testdata/format_match/two_job.yml b/bundle/config/loader/testdata/format_match/two_job.yml new file mode 100644 index 000000000..81ff90a75 --- /dev/null +++ b/bundle/config/loader/testdata/format_match/two_job.yml @@ -0,0 +1,7 @@ +resources: + jobs: + job1: + name: job1 + + job2: + name: job2 diff --git a/bundle/config/loader/testdata/format_not_match/job_and_pipeline.experiment.yml b/bundle/config/loader/testdata/format_not_match/job_and_pipeline.experiment.yml new file mode 100644 index 000000000..0867fcae4 --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/job_and_pipeline.experiment.yml @@ -0,0 +1,11 @@ +resources: + pipelines: + pipeline1: + name: pipeline1 + +targets: + target1: + resources: + jobs: + job1: + name: job1 diff --git a/bundle/config/loader/testdata/format_not_match/job_and_pipeline.job.yml b/bundle/config/loader/testdata/format_not_match/job_and_pipeline.job.yml new file mode 100644 index 000000000..0867fcae4 --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/job_and_pipeline.job.yml @@ -0,0 +1,11 @@ +resources: + pipelines: + pipeline1: + name: pipeline1 + +targets: + target1: + resources: + jobs: + job1: + name: job1 diff --git a/bundle/config/loader/testdata/format_not_match/multiple_resources.model_serving_endpoint.yml b/bundle/config/loader/testdata/format_not_match/multiple_resources.model_serving_endpoint.yml new file mode 100644 index 000000000..dc8e837c6 --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/multiple_resources.model_serving_endpoint.yml @@ -0,0 +1,43 @@ +resources: + experiments: + experiment1: + name: experiment1 + + model_serving_endpoints: + model_serving_endpoint1: + name: model_serving_endpoint1 + + jobs: + job1: + name: job1 + job2: + name: job2 + + models: + model1: + name: model1 + + pipelines: + pipeline1: + name: pipeline1 + pipeline2: + name: pipeline2 + + schemas: + schema1: + name: schema1 + +targets: + target1: + resources: + quality_monitors: + quality_monitor1: + baseline_table_name: quality_monitor1 + + jobs: + job3: + name: job3 + + registered_models: + registered_model1: + name: registered_model1 diff --git a/bundle/config/loader/testdata/format_not_match/second_job_in_target.job.yml b/bundle/config/loader/testdata/format_not_match/second_job_in_target.job.yml new file mode 100644 index 000000000..628b9879f --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/second_job_in_target.job.yml @@ -0,0 +1,11 @@ +resources: + jobs: + job1: + name: job1 + +targets: + target1: + resources: + jobs: + job2: + name: job2 diff --git a/bundle/config/loader/testdata/format_not_match/single_job.pipeline.yaml b/bundle/config/loader/testdata/format_not_match/single_job.pipeline.yaml new file mode 
100644 index 000000000..91af87cdc --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/single_job.pipeline.yaml @@ -0,0 +1,11 @@ +resources: + jobs: + job1: + name: job1 + +targets: + target1: + resources: + jobs: + job1: + description: job1 diff --git a/bundle/config/loader/testdata/format_not_match/two_jobs.job.yml b/bundle/config/loader/testdata/format_not_match/two_jobs.job.yml new file mode 100644 index 000000000..81ff90a75 --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/two_jobs.job.yml @@ -0,0 +1,7 @@ +resources: + jobs: + job1: + name: job1 + + job2: + name: job2 diff --git a/bundle/config/loader/testdata/format_not_match/two_jobs_in_target.job.yml b/bundle/config/loader/testdata/format_not_match/two_jobs_in_target.job.yml new file mode 100644 index 000000000..3b489c1f7 --- /dev/null +++ b/bundle/config/loader/testdata/format_not_match/two_jobs_in_target.job.yml @@ -0,0 +1,8 @@ +targets: + target1: + resources: + jobs: + job1: + description: job1 + job2: + description: job2 diff --git a/bundle/config/resources.go b/bundle/config/resources.go index a3afb7fc3..dc51a7caf 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -59,3 +59,22 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error) return found[0], nil } + +type ResourceDescription struct { + SingularName string +} + +// The keys of the map corresponds to the resource key in the bundle configuration. +func SupportedResources() map[string]ResourceDescription { + return map[string]ResourceDescription{ + "jobs": {SingularName: "job"}, + "pipelines": {SingularName: "pipeline"}, + "models": {SingularName: "model"}, + "experiments": {SingularName: "experiment"}, + "model_serving_endpoints": {SingularName: "model_serving_endpoint"}, + "registered_models": {SingularName: "registered_model"}, + "quality_monitors": {SingularName: "quality_monitor"}, + "schemas": {SingularName: "schema"}, + "clusters": {SingularName: "cluster"}, + } +} diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 6860d73da..c1b76118c 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -3,6 +3,7 @@ package config import ( "encoding/json" "reflect" + "strings" "testing" "github.com/stretchr/testify/assert" @@ -61,3 +62,18 @@ func TestCustomMarshallerIsImplemented(t *testing.T) { }, "Resource %s does not have a custom unmarshaller", field.Name) } } + +func TestSupportedResources(t *testing.T) { + expected := map[string]ResourceDescription{} + typ := reflect.TypeOf(Resources{}) + for i := 0; i < typ.NumField(); i++ { + field := typ.Field(i) + jsonTags := strings.Split(field.Tag.Get("json"), ",") + singularName := strings.TrimSuffix(jsonTags[0], "s") + expected[jsonTags[0]] = ResourceDescription{SingularName: singularName} + } + + // Please add your resource to the SupportedResources() function in resources.go + // if you are adding a new resource. 
+ assert.Equal(t, expected, SupportedResources()) +} diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index e1fad98a3..56387c386 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -56,6 +56,20 @@ const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }} ` +const recommendationTemplate = `{{ "Recommendation" | blue }}: {{ .Summary }} +{{- range $index, $element := .Paths }} + {{ if eq $index 0 }}at {{else}} {{ end}}{{ $element.String | green }} +{{- end }} +{{- range $index, $element := .Locations }} + {{ if eq $index 0 }}in {{else}} {{ end}}{{ $element.String | cyan }} +{{- end }} +{{- if .Detail }} + +{{ .Detail }} +{{- end }} + +` + const summaryTemplate = `{{- if .Name -}} Name: {{ .Name | bold }} {{- if .Target }} @@ -94,9 +108,20 @@ func buildTrailer(diags diag.Diagnostics) string { if warnings := len(diags.Filter(diag.Warning)); warnings > 0 { parts = append(parts, color.YellowString(pluralize(warnings, "warning", "warnings"))) } - if len(parts) > 0 { - return fmt.Sprintf("Found %s", strings.Join(parts, " and ")) - } else { + if recommendations := len(diags.Filter(diag.Recommendation)); recommendations > 0 { + parts = append(parts, color.BlueString(pluralize(recommendations, "recommendation", "recommendations"))) + } + switch { + case len(parts) >= 3: + first := strings.Join(parts[:len(parts)-1], ", ") + last := parts[len(parts)-1] + return fmt.Sprintf("Found %s, and %s", first, last) + case len(parts) == 2: + return fmt.Sprintf("Found %s and %s", parts[0], parts[1]) + case len(parts) == 1: + return fmt.Sprintf("Found %s", parts[0]) + default: + // No diagnostics to print. return color.GreenString("Validation OK!") } } @@ -130,6 +155,7 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error { errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate)) warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate)) + recommendationT := template.Must(template.New("recommendation").Funcs(renderFuncMap).Parse(recommendationTemplate)) // Print errors and warnings. 
for _, d := range diags { @@ -139,6 +165,8 @@ func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) t = errorT case diag.Warning: t = warningT + case diag.Recommendation: + t = recommendationT } for i := range d.Locations { diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index 976f86e79..1a41fa01c 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -45,6 +45,19 @@ func TestRenderTextOutput(t *testing.T) { "\n" + "Found 1 error\n", }, + { + name: "nil bundle and 1 recommendation", + diags: diag.Diagnostics{ + { + Severity: diag.Recommendation, + Summary: "recommendation", + }, + }, + opts: RenderOptions{RenderSummaryTable: true}, + expected: "Recommendation: recommendation\n" + + "\n" + + "Found 1 recommendation\n", + }, { name: "bundle during 'load' and 1 error", bundle: loadingBundle, @@ -84,7 +97,7 @@ func TestRenderTextOutput(t *testing.T) { "Found 2 warnings\n", }, { - name: "bundle during 'load' and 2 errors, 1 warning with details", + name: "bundle during 'load' and 2 errors, 1 warning and 1 recommendation with details", bundle: loadingBundle, diags: diag.Diagnostics{ diag.Diagnostic{ @@ -105,6 +118,12 @@ func TestRenderTextOutput(t *testing.T) { Detail: "detail (3)", Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, }, + diag.Diagnostic{ + Severity: diag.Recommendation, + Summary: "recommendation (4)", + Detail: "detail (4)", + Locations: []dyn.Location{{File: "foo.py", Line: 4, Column: 1}}, + }, }, opts: RenderOptions{RenderSummaryTable: true}, expected: "Error: error (1)\n" + @@ -122,10 +141,114 @@ func TestRenderTextOutput(t *testing.T) { "\n" + "detail (3)\n" + "\n" + + "Recommendation: recommendation (4)\n" + + " in foo.py:4:1\n" + + "\n" + + "detail (4)\n" + + "\n" + "Name: test-bundle\n" + "Target: test-target\n" + "\n" + - "Found 2 errors and 1 warning\n", + "Found 2 errors, 1 warning, and 1 recommendation\n", + }, + { + name: "bundle during 'load' and 1 error and 1 warning", + bundle: loadingBundle, + diags: diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "error (1)", + Detail: "detail (1)", + Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}}, + }, + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "warning (2)", + Detail: "detail (2)", + Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}}, + }, + }, + opts: RenderOptions{RenderSummaryTable: true}, + expected: "Error: error (1)\n" + + " in foo.py:1:1\n" + + "\n" + + "detail (1)\n" + + "\n" + + "Warning: warning (2)\n" + + " in foo.py:2:1\n" + + "\n" + + "detail (2)\n" + + "\n" + + "Name: test-bundle\n" + + "Target: test-target\n" + + "\n" + + "Found 1 error and 1 warning\n", + }, + { + name: "bundle during 'load' and 1 errors, 2 warning and 2 recommendations with details", + bundle: loadingBundle, + diags: diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Error, + Summary: "error (1)", + Detail: "detail (1)", + Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}}, + }, + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "warning (2)", + Detail: "detail (2)", + Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}}, + }, + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "warning (3)", + Detail: "detail (3)", + Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, + }, + diag.Diagnostic{ + Severity: diag.Recommendation, + Summary: "recommendation (4)", + Detail: "detail (4)", + Locations: 
[]dyn.Location{{File: "foo.py", Line: 4, Column: 1}}, + }, + diag.Diagnostic{ + Severity: diag.Recommendation, + Summary: "recommendation (5)", + Detail: "detail (5)", + Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}}, + }, + }, + opts: RenderOptions{RenderSummaryTable: true}, + expected: "Error: error (1)\n" + + " in foo.py:1:1\n" + + "\n" + + "detail (1)\n" + + "\n" + + "Warning: warning (2)\n" + + " in foo.py:2:1\n" + + "\n" + + "detail (2)\n" + + "\n" + + "Warning: warning (3)\n" + + " in foo.py:3:1\n" + + "\n" + + "detail (3)\n" + + "\n" + + "Recommendation: recommendation (4)\n" + + " in foo.py:4:1\n" + + "\n" + + "detail (4)\n" + + "\n" + + "Recommendation: recommendation (5)\n" + + " in foo.py:5:1\n" + + "\n" + + "detail (5)\n" + + "\n" + + "Name: test-bundle\n" + + "Target: test-target\n" + + "\n" + + "Found 1 error, 2 warnings, and 2 recommendations\n", }, { name: "bundle during 'init'", @@ -158,7 +281,7 @@ func TestRenderTextOutput(t *testing.T) { "Validation OK!\n", }, { - name: "nil bundle without summary with 1 error and 1 warning", + name: "nil bundle without summary with 1 error, 1 warning and 1 recommendation", bundle: nil, diags: diag.Diagnostics{ diag.Diagnostic{ @@ -173,6 +296,12 @@ func TestRenderTextOutput(t *testing.T) { Detail: "detail (2)", Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}}, }, + diag.Diagnostic{ + Severity: diag.Recommendation, + Summary: "recommendation (3)", + Detail: "detail (3)", + Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}}, + }, }, opts: RenderOptions{RenderSummaryTable: false}, expected: "Error: error (1)\n" + @@ -184,6 +313,11 @@ func TestRenderTextOutput(t *testing.T) { " in foo.py:3:1\n" + "\n" + "detail (2)\n" + + "\n" + + "Recommendation: recommendation (3)\n" + + " in foo.py:5:1\n" + + "\n" + + "detail (3)\n" + "\n", }, } @@ -304,6 +438,30 @@ func TestRenderDiagnostics(t *testing.T) { "\n" + "'name' is required\n\n", }, + { + name: "recommendation with multiple paths and locations", + diags: diag.Diagnostics{ + { + Severity: diag.Recommendation, + Summary: "summary", + Detail: "detail", + Paths: []dyn.Path{ + dyn.MustPathFromString("resources.jobs.xxx"), + dyn.MustPathFromString("resources.jobs.yyy"), + }, + Locations: []dyn.Location{ + {File: "foo.yaml", Line: 1, Column: 2}, + {File: "bar.yaml", Line: 3, Column: 4}, + }, + }, + }, + expected: "Recommendation: summary\n" + + " at resources.jobs.xxx\n" + + " resources.jobs.yyy\n" + + " in foo.yaml:1:2\n" + + " bar.yaml:3:4\n\n" + + "detail\n\n", + }, } for _, tc := range testCases { diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index 51772e938..06b9cc15a 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -535,6 +535,10 @@ "description": "Whether Photon is enabled for this pipeline.", "$ref": "#/$defs/bool" }, + "schema": { + "description": "The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.", + "$ref": "#/$defs/string" + }, "serverless": { "description": "Whether serverless compute is enabled for this pipeline.", "$ref": "#/$defs/bool" @@ -2624,7 +2628,7 @@ "type": "object", "properties": { "no_alert_for_skipped_runs": { - "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.", + "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. 
Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool" }, "on_duration_warning_threshold_exceeded": { @@ -3065,6 +3069,7 @@ "$ref": "#/$defs/map/string" }, "pipeline_params": { + "description": "Controls whether the pipeline should perform a full refresh", "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams" }, "python_named_params": { @@ -3539,7 +3544,7 @@ "type": "object", "properties": { "no_alert_for_skipped_runs": { - "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.", + "description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.", "$ref": "#/$defs/bool" }, "on_duration_warning_threshold_exceeded": { diff --git a/cmd/workspace/apps/apps.go b/cmd/workspace/apps/apps.go index baec6d03c..780f55945 100755 --- a/cmd/workspace/apps/apps.go +++ b/cmd/workspace/apps/apps.go @@ -81,6 +81,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `The description of the app.`) + // TODO: array: resources cmd.Use = "create NAME" cmd.Short = `Create an app.` @@ -910,6 +911,7 @@ func newUpdate() *cobra.Command { cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `The description of the app.`) + // TODO: array: resources cmd.Use = "update NAME" cmd.Short = `Update an app.` diff --git a/cmd/workspace/git-credentials/git-credentials.go b/cmd/workspace/git-credentials/git-credentials.go index 2e8cc2cd4..b5082d311 100755 --- a/cmd/workspace/git-credentials/git-credentials.go +++ b/cmd/workspace/git-credentials/git-credentials.go @@ -53,13 +53,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var createOverrides []func( *cobra.Command, - *workspace.CreateCredentials, + *workspace.CreateCredentialsRequest, ) func newCreate() *cobra.Command { cmd := &cobra.Command{} - var createReq workspace.CreateCredentials + var createReq workspace.CreateCredentialsRequest var createJson flags.JsonFlag // TODO: short flags @@ -79,8 +79,9 @@ func newCreate() *cobra.Command { Arguments: GIT_PROVIDER: Git provider. This field is case-insensitive. The available Git providers - are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, - bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit.` + are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, + gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and + awsCodeCommit.` cmd.Annotations = make(map[string]string) @@ -136,13 +137,13 @@ func newCreate() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. 
var deleteOverrides []func( *cobra.Command, - *workspace.DeleteGitCredentialRequest, + *workspace.DeleteCredentialsRequest, ) func newDelete() *cobra.Command { cmd := &cobra.Command{} - var deleteReq workspace.DeleteGitCredentialRequest + var deleteReq workspace.DeleteCredentialsRequest // TODO: short flags @@ -209,13 +210,13 @@ func newDelete() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var getOverrides []func( *cobra.Command, - *workspace.GetGitCredentialRequest, + *workspace.GetCredentialsRequest, ) func newGet() *cobra.Command { cmd := &cobra.Command{} - var getReq workspace.GetGitCredentialRequest + var getReq workspace.GetCredentialsRequest // TODO: short flags @@ -322,33 +323,48 @@ func newList() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var updateOverrides []func( *cobra.Command, - *workspace.UpdateCredentials, + *workspace.UpdateCredentialsRequest, ) func newUpdate() *cobra.Command { cmd := &cobra.Command{} - var updateReq workspace.UpdateCredentials + var updateReq workspace.UpdateCredentialsRequest var updateJson flags.JsonFlag // TODO: short flags cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`) cmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `The username or email provided with your Git provider account, depending on which provider you are using.`) cmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) - cmd.Use = "update CREDENTIAL_ID" + cmd.Use = "update CREDENTIAL_ID GIT_PROVIDER" cmd.Short = `Update a credential.` cmd.Long = `Update a credential. Updates the specified Git credential. Arguments: - CREDENTIAL_ID: The ID for the corresponding credential to access.` + CREDENTIAL_ID: The ID for the corresponding credential to access. + GIT_PROVIDER: Git provider. This field is case-insensitive. The available Git providers + are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, + gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and + awsCodeCommit.` cmd.Annotations = make(map[string]string) + cmd.Args = func(cmd *cobra.Command, args []string) error { + if cmd.Flags().Changed("json") { + err := root.ExactArgs(1)(cmd, args) + if err != nil { + return fmt.Errorf("when --json flag is specified, provide only CREDENTIAL_ID as positional arguments. Provide 'git_provider' in your JSON input") + } + return nil + } + check := root.ExactArgs(2) + return check(cmd, args) + } + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -360,27 +376,13 @@ func newUpdate() *cobra.Command { return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." - names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding credential to access") - } _, err = fmt.Sscan(args[0], &updateReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) } + if !cmd.Flags().Changed("json") { + updateReq.GitProvider = args[1] + } err = w.GitCredentials.Update(ctx, updateReq) if err != nil { diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 5b4d9645e..ac361e313 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -954,6 +954,7 @@ func newUpdate() *cobra.Command { // TODO: array: notifications cmd.Flags().BoolVar(&updateReq.Photon, "photon", updateReq.Photon, `Whether Photon is enabled for this pipeline.`) cmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`) + cmd.Flags().StringVar(&updateReq.Schema, "schema", updateReq.Schema, `The default schema (database) where tables are read from or published to.`) cmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) cmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing checkpoints and tables.`) cmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`) diff --git a/cmd/workspace/repos/overrides.go b/cmd/workspace/repos/overrides.go index 96d645efb..9546d1c1e 100644 --- a/cmd/workspace/repos/overrides.go +++ b/cmd/workspace/repos/overrides.go @@ -19,7 +19,7 @@ func listOverride(listCmd *cobra.Command, listReq *workspace.ListReposRequest) { {{end}}`) } -func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepo) { +func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepoRequest) { createCmd.Use = "create URL [PROVIDER]" createCmd.Args = func(cmd *cobra.Command, args []string) error { // If the provider argument is not specified, we try to detect it from the URL. @@ -95,7 +95,7 @@ func getOverride(getCmd *cobra.Command, getReq *workspace.GetRepoRequest) { } } -func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepo) { +func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepoRequest) { updateCmd.Use = "update REPO_ID_OR_PATH" updateJson := updateCmd.Flag("json").Value.(*flags.JsonFlag) diff --git a/cmd/workspace/repos/repos.go b/cmd/workspace/repos/repos.go index fb3d51b06..f11dd3ace 100755 --- a/cmd/workspace/repos/repos.go +++ b/cmd/workspace/repos/repos.go @@ -61,13 +61,13 @@ func New() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var createOverrides []func( *cobra.Command, - *workspace.CreateRepo, + *workspace.CreateRepoRequest, ) func newCreate() *cobra.Command { cmd := &cobra.Command{} - var createReq workspace.CreateRepo + var createReq workspace.CreateRepoRequest var createJson flags.JsonFlag // TODO: short flags @@ -87,8 +87,9 @@ func newCreate() *cobra.Command { Arguments: URL: URL of the Git repository to be linked. PROVIDER: Git provider. This field is case-insensitive. 
The available Git providers - are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, - bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit.` + are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, + gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and + awsCodeCommit.` cmd.Annotations = make(map[string]string) @@ -164,7 +165,7 @@ func newDelete() *cobra.Command { Deletes the specified repo. Arguments: - REPO_ID: The ID for the corresponding repo to access.` + REPO_ID: ID of the Git folder (repo) object in the workspace.` cmd.Annotations = make(map[string]string) @@ -181,14 +182,14 @@ func newDelete() *cobra.Command { if err != nil { return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace") if err != nil { return err } args = append(args, id) } if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") + return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace") } _, err = fmt.Sscan(args[0], &deleteReq.RepoId) if err != nil { @@ -237,7 +238,7 @@ func newGet() *cobra.Command { Returns the repo with the given repo ID. Arguments: - REPO_ID: The ID for the corresponding repo to access.` + REPO_ID: ID of the Git folder (repo) object in the workspace.` cmd.Annotations = make(map[string]string) @@ -254,14 +255,14 @@ func newGet() *cobra.Command { if err != nil { return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace") if err != nil { return err } args = append(args, id) } if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") + return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace") } _, err = fmt.Sscan(args[0], &getReq.RepoId) if err != nil { @@ -451,8 +452,8 @@ func newList() *cobra.Command { cmd.Short = `Get repos.` cmd.Long = `Get repos. - Returns repos that the calling user has Manage permissions on. Results are - paginated with each page containing twenty repos.` + Returns repos that the calling user has Manage permissions on. Use + next_page_token to iterate through additional pages.` cmd.Annotations = make(map[string]string) @@ -569,13 +570,13 @@ func newSetPermissions() *cobra.Command { // Functions can be added from the `init()` function in manually curated files in this directory. var updateOverrides []func( *cobra.Command, - *workspace.UpdateRepo, + *workspace.UpdateRepoRequest, ) func newUpdate() *cobra.Command { cmd := &cobra.Command{} - var updateReq workspace.UpdateRepo + var updateReq workspace.UpdateRepoRequest var updateJson flags.JsonFlag // TODO: short flags @@ -593,7 +594,7 @@ func newUpdate() *cobra.Command { latest commit on the same branch. Arguments: - REPO_ID: The ID for the corresponding repo to access.` + REPO_ID: ID of the Git folder (repo) object in the workspace.` cmd.Annotations = make(map[string]string) @@ -616,14 +617,14 @@ func newUpdate() *cobra.Command { if err != nil { return fmt.Errorf("failed to load names for Repos drop-down. 
Please manually specify required arguments. Original error: %w", err) } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace") if err != nil { return err } args = append(args, id) } if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") + return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace") } _, err = fmt.Sscan(args[0], &updateReq.RepoId) if err != nil { diff --git a/go.mod b/go.mod index 9141274c2..697205f33 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.22.7 require ( github.com/Masterminds/semver/v3 v3.3.0 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.47.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.48.0 // Apache 2.0 github.com/fatih/color v1.17.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.6.0 // BSD-3-Clause @@ -27,8 +27,8 @@ require ( golang.org/x/mod v0.21.0 golang.org/x/oauth2 v0.23.0 golang.org/x/sync v0.8.0 - golang.org/x/term v0.24.0 - golang.org/x/text v0.18.0 + golang.org/x/term v0.25.0 + golang.org/x/text v0.19.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -64,7 +64,7 @@ require ( go.opentelemetry.io/otel/trace v1.24.0 // indirect golang.org/x/crypto v0.24.0 // indirect golang.org/x/net v0.26.0 // indirect - golang.org/x/sys v0.25.0 // indirect + golang.org/x/sys v0.26.0 // indirect golang.org/x/time v0.5.0 // indirect google.golang.org/api v0.182.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect diff --git a/go.sum b/go.sum index 177707a50..03698b20a 100644 --- a/go.sum +++ b/go.sum @@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/databricks/databricks-sdk-go v0.47.0 h1:eE7dN9axviL8+s10jnQAayOYDaR+Mfu7E9COGjO4lrQ= -github.com/databricks/databricks-sdk-go v0.47.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= +github.com/databricks/databricks-sdk-go v0.48.0 h1:46KtsnRo+FGhC3izUXbpL0PXBNomvsdignYDhJZlm9s= +github.com/databricks/databricks-sdk-go v0.48.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -212,14 +212,14 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34= -golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM= -golang.org/x/term v0.24.0/go.mod 
h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224= -golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/internal/helpers.go b/internal/helpers.go index 419fa419c..9387706bb 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -519,7 +519,7 @@ func TemporaryRepo(t *testing.T, w *databricks.WorkspaceClient) string { repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("integration-test-repo-")) t.Logf("Creating repo:%s", repoPath) - repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepo{ + repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{ Url: "https://github.com/databricks/cli", Provider: "github", Path: repoPath, diff --git a/internal/locker_test.go b/internal/locker_test.go index 21e08f732..3ae783d1b 100644 --- a/internal/locker_test.go +++ b/internal/locker_test.go @@ -29,7 +29,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr assert.NoError(t, err) remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName(projectNamePrefix)) - repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepo{ + repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ Path: remoteProjectRoot, Url: EmptyRepoUrl, Provider: "gitHub", diff --git a/internal/repos_test.go b/internal/repos_test.go index de0d926ad..1ad0e8775 100644 --- a/internal/repos_test.go +++ b/internal/repos_test.go @@ -34,7 +34,7 @@ func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ct func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) (int64, string) { repoPath := synthesizeTemporaryRepoPath(t, w, ctx) - repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepo{ + repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{ Path: repoPath, Url: repoUrl, Provider: "gitHub", diff --git a/internal/sync_test.go b/internal/sync_test.go index 4021e6490..6f8b1827b 100644 --- a/internal/sync_test.go +++ b/internal/sync_test.go @@ -38,7 +38,7 @@ func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Contex require.NoError(t, err) repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-sync-integration-")) - repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepo{ + repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ Path: repoPath, Url: repoUrl, Provider: "gitHub", diff --git a/libs/diag/severity.go b/libs/diag/severity.go index d25c12806..0e88085f5 100644 --- a/libs/diag/severity.go +++ b/libs/diag/severity.go @@ -6,4 +6,5 @@ const ( 
 	Error Severity = iota
 	Warning
 	Info
+	Recommendation
 )
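Reviewer note: the new `validateFileFormat` in `bundle/config/loader/process_include.go` keys entirely off the include file's suffix. If the path ends in `.<singular>.yml` or `.<singular>.yaml` for any entry in `config.SupportedResources()`, the file is expected to define or configure at most one resource, and only of that type; otherwise a `diag.Recommendation` is emitted. Below is a minimal, stand-alone sketch of just the suffix-matching step — the map is a hypothetical trimmed copy of `SupportedResources()`, and the real check goes on to walk the `dyn.Value` tree under `resources` and `targets.*.resources` rather than returning a string:

```go
package main

import (
	"fmt"
	"strings"
)

// singularNames mirrors config.SupportedResources(): plural config key -> singular name.
// Only two entries are shown here; the real map covers all supported resource types.
var singularNames = map[string]string{
	"jobs":      "job",
	"pipelines": "pipeline",
}

// matchExtension reports which resource type, if any, a file name commits to
// via its ".<singular>.yml" / ".<singular>.yaml" suffix.
func matchExtension(fileName string) (singular, ext string, ok bool) {
	for _, s := range singularNames {
		for _, yamlExt := range []string{"yml", "yaml"} {
			candidate := fmt.Sprintf(".%s.%s", s, yamlExt)
			if strings.HasSuffix(fileName, candidate) {
				return s, candidate, true
			}
		}
	}
	return "", "", false
}

func main() {
	// Commits the file to a single pipeline definition.
	fmt.Println(matchExtension("my_pipelines.pipeline.yaml"))
	// A plain .yml file carries no commitment, so no recommendation is emitted.
	fmt.Println(matchExtension("resources.yml"))
}
```

Note the diagnostic is a recommendation, not an error: `processInclude.Apply` appends it to `diags` and still merges the file, so existing bundles keep working unchanged.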
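On the rendering side, `buildTrailer` now joins the error/warning/recommendation counts with a serial comma once there are three parts, matching the expectations in `render_text_output_test.go`. A self-contained sketch of that joining rule, with plain strings standing in for the `fatih/color` helpers used in the real code:

```go
package main

import (
	"fmt"
	"strings"
)

// pluralize mirrors the helper used by buildTrailer.
func pluralize(n int, singular, plural string) string {
	if n == 1 {
		return fmt.Sprintf("%d %s", n, singular)
	}
	return fmt.Sprintf("%d %s", n, plural)
}

// trailer reproduces the joining rule: a serial comma for three or more parts,
// a plain "and" for two, and "Validation OK!" when there is nothing to report.
func trailer(errs, warns, recs int) string {
	var parts []string
	if errs > 0 {
		parts = append(parts, pluralize(errs, "error", "errors"))
	}
	if warns > 0 {
		parts = append(parts, pluralize(warns, "warning", "warnings"))
	}
	if recs > 0 {
		parts = append(parts, pluralize(recs, "recommendation", "recommendations"))
	}
	switch {
	case len(parts) >= 3:
		return fmt.Sprintf("Found %s, and %s", strings.Join(parts[:len(parts)-1], ", "), parts[len(parts)-1])
	case len(parts) == 2:
		return fmt.Sprintf("Found %s and %s", parts[0], parts[1])
	case len(parts) == 1:
		return fmt.Sprintf("Found %s", parts[0])
	default:
		return "Validation OK!"
	}
}

func main() {
	fmt.Println(trailer(2, 1, 1)) // Found 2 errors, 1 warning, and 1 recommendation
	fmt.Println(trailer(1, 1, 0)) // Found 1 error and 1 warning
	fmt.Println(trailer(0, 0, 0)) // Validation OK!
}
```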