Merge remote-tracking branch 'origin' into remove-deprecated

Shreyas Goenka 2024-10-07 17:06:37 +02:00
commit f440f71b61
No known key found for this signature in database
GPG Key ID: 92A07DF49CCB0622
35 changed files with 800 additions and 76 deletions

View File

@@ -1 +1 @@
-6f6b1371e640f2dfeba72d365ac566368656f6b6
+0c86ea6dbd9a730c24ff0d4e509603e476955ac5

View File

@@ -18,7 +18,7 @@ func TestEntryPointNoRootPath(t *testing.T) {
func TestEntryPoint(t *testing.T) {
b := &bundle.Bundle{
-BundleRootPath: "testdata",
+BundleRootPath: "testdata/basic",
}
diags := bundle.Apply(context.Background(), b, loader.EntryPoint())
require.NoError(t, diags.Error())

View File

@@ -3,12 +3,135 @@ package loader
import (
"context"
"fmt"
"slices"
"sort"
"strings"
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
)
func validateFileFormat(configRoot dyn.Value, filePath string) diag.Diagnostics {
for _, resourceDescription := range config.SupportedResources() {
singularName := resourceDescription.SingularName
for _, yamlExt := range []string{"yml", "yaml"} {
ext := fmt.Sprintf(".%s.%s", singularName, yamlExt)
if strings.HasSuffix(filePath, ext) {
return validateSingleResourceDefined(configRoot, ext, singularName)
}
}
}
return nil
}
func validateSingleResourceDefined(configRoot dyn.Value, ext, typ string) diag.Diagnostics {
type resource struct {
path dyn.Path
value dyn.Value
typ string
key string
}
resources := []resource{}
supportedResources := config.SupportedResources()
// Gather all resources defined in the resources block.
_, err := dyn.MapByPattern(
configRoot,
dyn.NewPattern(dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
// The key for the resource, e.g. "my_job" for jobs.my_job.
k := p[2].Key()
// The type of the resource, e.g. "job" for jobs.my_job.
typ := supportedResources[p[1].Key()].SingularName
resources = append(resources, resource{path: p, value: v, typ: typ, key: k})
return v, nil
})
if err != nil {
return diag.FromErr(err)
}
// Gather all resources defined in a target block.
_, err = dyn.MapByPattern(
configRoot,
dyn.NewPattern(dyn.Key("targets"), dyn.AnyKey(), dyn.Key("resources"), dyn.AnyKey(), dyn.AnyKey()),
func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
// The key for the resource, e.g. "my_job" for jobs.my_job.
k := p[4].Key()
// The type of the resource, e.g. "job" for jobs.my_job.
typ := supportedResources[p[3].Key()].SingularName
resources = append(resources, resource{path: p, value: v, typ: typ, key: k})
return v, nil
})
if err != nil {
return diag.FromErr(err)
}
typeMatch := true
seenKeys := map[string]struct{}{}
for _, rr := range resources {
// case: The resource is not of the correct type.
if rr.typ != typ {
typeMatch = false
break
}
seenKeys[rr.key] = struct{}{}
}
// Format matches. There's at most one resource defined in the file.
// The resource is also of the correct type.
if typeMatch && len(seenKeys) <= 1 {
return nil
}
detail := strings.Builder{}
detail.WriteString("The following resources are defined or configured in this file:\n")
lines := []string{}
for _, r := range resources {
lines = append(lines, fmt.Sprintf(" - %s (%s)\n", r.key, r.typ))
}
// Sort the lines to print to make the output deterministic.
sort.Strings(lines)
// Compact the lines before writing them to the message to remove any duplicate lines.
// This is needed because we do not dedup earlier when gathering the resources
// and it's valid to define the same resource in both the resources and targets block.
lines = slices.Compact(lines)
for _, l := range lines {
detail.WriteString(l)
}
locations := []dyn.Location{}
paths := []dyn.Path{}
for _, rr := range resources {
locations = append(locations, rr.value.Locations()...)
paths = append(paths, rr.path)
}
// Sort the locations and paths to make the output deterministic.
sort.Slice(locations, func(i, j int) bool {
return locations[i].String() < locations[j].String()
})
sort.Slice(paths, func(i, j int) bool {
return paths[i].String() < paths[j].String()
})
return diag.Diagnostics{
{
Severity: diag.Recommendation,
Summary: fmt.Sprintf("define a single %s in a file with the %s extension.", strings.ReplaceAll(typ, "_", " "), ext),
Detail: detail.String(),
Locations: locations,
Paths: paths,
},
}
}
type processInclude struct {
fullPath string
relPath string
@@ -31,6 +154,13 @@ func (m *processInclude) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
if diags.HasError() {
return diags
}
// Add any diagnostics associated with the file format.
diags = append(diags, validateFileFormat(this.Value(), m.relPath)...)
if diags.HasError() {
return diags
}
err := b.Config.Merge(this)
if err != nil {
diags = diags.Extend(diag.FromErr(err))
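As a quick illustration of the new check, here is a minimal standalone sketch (not part of this commit) of the suffix matching that validateFileFormat performs before it inspects any resources: a file only opts in to the validation when its name ends in `.<singular>.yml` or `.<singular>.yaml` for one of the supported resource types.

```go
package main

import (
	"fmt"
	"strings"
)

// matchedResourceType mirrors the extension check in validateFileFormat,
// trimmed down to two of the singular names from config.SupportedResources().
func matchedResourceType(filePath string) (string, bool) {
	for _, singular := range []string{"job", "pipeline"} {
		for _, yamlExt := range []string{"yml", "yaml"} {
			if strings.HasSuffix(filePath, fmt.Sprintf(".%s.%s", singular, yamlExt)) {
				return singular, true
			}
		}
	}
	return "", false
}

func main() {
	for _, p := range []string{"my_job.job.yml", "etl.pipeline.yaml", "resources.yml"} {
		typ, ok := matchedResourceType(p)
		fmt.Printf("%s -> %q %v\n", p, typ, ok)
	}
}
```

Files without such an extension (for example two_job.yml or multiple_resources.yml in the tests below) are skipped entirely, which is why they produce no diagnostics in TestProcessIncludeFormatMatch.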

View File

@@ -8,13 +8,15 @@ import (
"github.com/databricks/cli/bundle"
"github.com/databricks/cli/bundle/config"
"github.com/databricks/cli/bundle/config/loader"
"github.com/databricks/cli/libs/diag"
"github.com/databricks/cli/libs/dyn"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestProcessInclude(t *testing.T) {
b := &bundle.Bundle{
-BundleRootPath: "testdata",
+BundleRootPath: "testdata/basic",
Config: config.Root{
Workspace: config.Workspace{
Host: "foo",
@@ -33,3 +35,184 @@ func TestProcessInclude(t *testing.T) {
require.NoError(t, diags.Error())
assert.Equal(t, "bar", b.Config.Workspace.Host)
}
func TestProcessIncludeFormatMatch(t *testing.T) {
for _, fileName := range []string{
"one_job.job.yml",
"one_pipeline.pipeline.yaml",
"two_job.yml",
"job_and_pipeline.yml",
"multiple_resources.yml",
} {
t.Run(fileName, func(t *testing.T) {
b := &bundle.Bundle{
BundleRootPath: "testdata/format_match",
Config: config.Root{
Bundle: config.Bundle{
Name: "format_test",
},
},
}
m := loader.ProcessInclude(filepath.Join(b.BundleRootPath, fileName), fileName)
diags := bundle.Apply(context.Background(), b, m)
assert.Empty(t, diags)
})
}
}
func TestProcessIncludeFormatNotMatch(t *testing.T) {
for fileName, expectedDiags := range map[string]diag.Diagnostics{
"single_job.pipeline.yaml": {
{
Severity: diag.Recommendation,
Summary: "define a single pipeline in a file with the .pipeline.yaml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/single_job.pipeline.yaml"), Line: 11, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/single_job.pipeline.yaml"), Line: 4, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.jobs.job1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
},
},
},
"job_and_pipeline.job.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single job in a file with the .job.yml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.job.yml"), Line: 11, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.job.yml"), Line: 4, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.pipelines.pipeline1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
},
},
},
"job_and_pipeline.experiment.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single experiment in a file with the .experiment.yml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - pipeline1 (pipeline)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.experiment.yml"), Line: 11, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/job_and_pipeline.experiment.yml"), Line: 4, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.pipelines.pipeline1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
},
},
},
"two_jobs.job.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single job in a file with the .job.yml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/two_jobs.job.yml"), Line: 4, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/two_jobs.job.yml"), Line: 7, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.jobs.job1"),
dyn.MustPathFromString("resources.jobs.job2"),
},
},
},
"second_job_in_target.job.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single job in a file with the .job.yml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/second_job_in_target.job.yml"), Line: 11, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/second_job_in_target.job.yml"), Line: 4, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.jobs.job1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
},
},
},
"two_jobs_in_target.job.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single job in a file with the .job.yml extension.",
Detail: "The following resources are defined or configured in this file:\n - job1 (job)\n - job2 (job)\n",
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/two_jobs_in_target.job.yml"), Line: 6, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/two_jobs_in_target.job.yml"), Line: 8, Column: 11},
},
Paths: []dyn.Path{
dyn.MustPathFromString("targets.target1.resources.jobs.job1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job2"),
},
},
},
"multiple_resources.model_serving_endpoint.yml": {
{
Severity: diag.Recommendation,
Summary: "define a single model serving endpoint in a file with the .model_serving_endpoint.yml extension.",
Detail: `The following resources are defined or configured in this file:
- experiment1 (experiment)
- job1 (job)
- job2 (job)
- job3 (job)
- model1 (model)
- model_serving_endpoint1 (model_serving_endpoint)
- pipeline1 (pipeline)
- pipeline2 (pipeline)
- quality_monitor1 (quality_monitor)
- registered_model1 (registered_model)
- schema1 (schema)
`,
Locations: []dyn.Location{
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 12, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 14, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 18, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 22, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 24, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 28, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 35, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 39, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 43, Column: 11},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 4, Column: 7},
{File: filepath.FromSlash("testdata/format_not_match/multiple_resources.model_serving_endpoint.yml"), Line: 8, Column: 7},
},
Paths: []dyn.Path{
dyn.MustPathFromString("resources.experiments.experiment1"),
dyn.MustPathFromString("resources.jobs.job1"),
dyn.MustPathFromString("resources.jobs.job2"),
dyn.MustPathFromString("resources.model_serving_endpoints.model_serving_endpoint1"),
dyn.MustPathFromString("resources.models.model1"),
dyn.MustPathFromString("resources.pipelines.pipeline1"),
dyn.MustPathFromString("resources.pipelines.pipeline2"),
dyn.MustPathFromString("resources.schemas.schema1"),
dyn.MustPathFromString("targets.target1.resources.jobs.job3"),
dyn.MustPathFromString("targets.target1.resources.quality_monitors.quality_monitor1"),
dyn.MustPathFromString("targets.target1.resources.registered_models.registered_model1"),
},
},
},
} {
t.Run(fileName, func(t *testing.T) {
b := &bundle.Bundle{
BundleRootPath: "testdata/format_not_match",
Config: config.Root{
Bundle: config.Bundle{
Name: "format_test",
},
},
}
m := loader.ProcessInclude(filepath.Join(b.BundleRootPath, fileName), fileName)
diags := bundle.Apply(context.Background(), b, m)
require.Len(t, diags, 1)
assert.Equal(t, expectedDiags, diags)
})
}
}

View File

@@ -0,0 +1,11 @@
resources:
pipelines:
pipeline1:
name: pipeline1
targets:
target1:
resources:
jobs:
job1:
name: job1

View File

@@ -0,0 +1,43 @@
resources:
experiments:
experiment1:
name: experiment1
model_serving_endpoints:
model_serving_endpoint1:
name: model_serving_endpoint1
jobs:
job1:
name: job1
job2:
name: job2
models:
model1:
name: model1
pipelines:
pipeline1:
name: pipeline1
pipeline2:
name: pipeline2
schemas:
schema1:
name: schema1
targets:
target1:
resources:
quality_monitors:
quality_monitor1:
baseline_table_name: quality_monitor1
jobs:
job3:
name: job3
registered_models:
registered_model1:
name: registered_model1

View File

@@ -0,0 +1,11 @@
resources:
jobs:
job1:
name: job1
targets:
target1:
resources:
jobs:
job1:
description: job1

View File

@@ -0,0 +1,4 @@
resources:
pipelines:
pipeline1:
name: pipeline1

View File

@@ -0,0 +1,7 @@
resources:
jobs:
job1:
name: job1
job2:
name: job2

View File

@@ -0,0 +1,11 @@
resources:
pipelines:
pipeline1:
name: pipeline1
targets:
target1:
resources:
jobs:
job1:
name: job1

View File

@@ -0,0 +1,11 @@
resources:
pipelines:
pipeline1:
name: pipeline1
targets:
target1:
resources:
jobs:
job1:
name: job1

View File

@@ -0,0 +1,43 @@
resources:
experiments:
experiment1:
name: experiment1
model_serving_endpoints:
model_serving_endpoint1:
name: model_serving_endpoint1
jobs:
job1:
name: job1
job2:
name: job2
models:
model1:
name: model1
pipelines:
pipeline1:
name: pipeline1
pipeline2:
name: pipeline2
schemas:
schema1:
name: schema1
targets:
target1:
resources:
quality_monitors:
quality_monitor1:
baseline_table_name: quality_monitor1
jobs:
job3:
name: job3
registered_models:
registered_model1:
name: registered_model1

View File

@@ -0,0 +1,11 @@
resources:
jobs:
job1:
name: job1
targets:
target1:
resources:
jobs:
job2:
name: job2

View File

@@ -0,0 +1,11 @@
resources:
jobs:
job1:
name: job1
targets:
target1:
resources:
jobs:
job1:
description: job1

View File

@@ -0,0 +1,7 @@
resources:
jobs:
job1:
name: job1
job2:
name: job2

View File

@@ -0,0 +1,8 @@
targets:
target1:
resources:
jobs:
job1:
description: job1
job2:
description: job2

View File

@@ -59,3 +59,22 @@ func (r *Resources) FindResourceByConfigKey(key string) (ConfigResource, error)
return found[0], nil
}
type ResourceDescription struct {
SingularName string
}
// The keys of the map correspond to the resource keys in the bundle configuration.
func SupportedResources() map[string]ResourceDescription {
return map[string]ResourceDescription{
"jobs": {SingularName: "job"},
"pipelines": {SingularName: "pipeline"},
"models": {SingularName: "model"},
"experiments": {SingularName: "experiment"},
"model_serving_endpoints": {SingularName: "model_serving_endpoint"},
"registered_models": {SingularName: "registered_model"},
"quality_monitors": {SingularName: "quality_monitor"},
"schemas": {SingularName: "schema"},
"clusters": {SingularName: "cluster"},
}
}

View File

@@ -3,6 +3,7 @@ package config
import (
"encoding/json"
"reflect"
"strings"
"testing"
"github.com/stretchr/testify/assert"
@@ -61,3 +62,18 @@ func TestCustomMarshallerIsImplemented(t *testing.T) {
}, "Resource %s does not have a custom unmarshaller", field.Name)
}
}
func TestSupportedResources(t *testing.T) {
expected := map[string]ResourceDescription{}
typ := reflect.TypeOf(Resources{})
for i := 0; i < typ.NumField(); i++ {
field := typ.Field(i)
jsonTags := strings.Split(field.Tag.Get("json"), ",")
singularName := strings.TrimSuffix(jsonTags[0], "s")
expected[jsonTags[0]] = ResourceDescription{SingularName: singularName}
}
// Please add your resource to the SupportedResources() function in resources.go
// if you are adding a new resource.
assert.Equal(t, expected, SupportedResources())
}
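The test above pins down the naming convention used by SupportedResources(): each singular name is the plural configuration key with its trailing "s" trimmed. A minimal standalone sketch (not part of this commit) of that rule:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Derive the singular resource name the same way TestSupportedResources does.
	for _, plural := range []string{"jobs", "pipelines", "model_serving_endpoints", "quality_monitors"} {
		fmt.Printf("%s -> %s\n", plural, strings.TrimSuffix(plural, "s"))
	}
}
```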

View File

@@ -56,6 +56,20 @@ const warningTemplate = `{{ "Warning" | yellow }}: {{ .Summary }}
`
const recommendationTemplate = `{{ "Recommendation" | blue }}: {{ .Summary }}
{{- range $index, $element := .Paths }}
{{ if eq $index 0 }}at {{else}} {{ end}}{{ $element.String | green }}
{{- end }}
{{- range $index, $element := .Locations }}
{{ if eq $index 0 }}in {{else}} {{ end}}{{ $element.String | cyan }}
{{- end }}
{{- if .Detail }}
{{ .Detail }}
{{- end }}
`
const summaryTemplate = `{{- if .Name -}}
Name: {{ .Name | bold }}
{{- if .Target }}
@@ -94,9 +108,20 @@ func buildTrailer(diags diag.Diagnostics) string {
if warnings := len(diags.Filter(diag.Warning)); warnings > 0 {
parts = append(parts, color.YellowString(pluralize(warnings, "warning", "warnings")))
}
-if len(parts) > 0 {
-return fmt.Sprintf("Found %s", strings.Join(parts, " and "))
-} else {
+if recommendations := len(diags.Filter(diag.Recommendation)); recommendations > 0 {
+parts = append(parts, color.BlueString(pluralize(recommendations, "recommendation", "recommendations")))
+}
switch {
case len(parts) >= 3:
first := strings.Join(parts[:len(parts)-1], ", ")
last := parts[len(parts)-1]
return fmt.Sprintf("Found %s, and %s", first, last)
case len(parts) == 2:
return fmt.Sprintf("Found %s and %s", parts[0], parts[1])
case len(parts) == 1:
return fmt.Sprintf("Found %s", parts[0])
default:
// No diagnostics to print.
return color.GreenString("Validation OK!")
}
}
@@ -130,6 +155,7 @@ func renderSummaryTemplate(out io.Writer, b *bundle.Bundle, diags diag.Diagnosti
func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics) error {
errorT := template.Must(template.New("error").Funcs(renderFuncMap).Parse(errorTemplate))
warningT := template.Must(template.New("warning").Funcs(renderFuncMap).Parse(warningTemplate))
recommendationT := template.Must(template.New("recommendation").Funcs(renderFuncMap).Parse(recommendationTemplate))
// Print errors and warnings.
for _, d := range diags {
@@ -139,6 +165,8 @@ func renderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics)
t = errorT
case diag.Warning:
t = warningT
case diag.Recommendation:
t = recommendationT
}
for i := range d.Locations {
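The reworked trailer in buildTrailer joins the per-severity counts into an English list: a single part stays bare, two parts are joined with "and", and three or more use commas plus a final ", and". A minimal standalone sketch (not part of this commit) of that joining rule:

```go
package main

import (
	"fmt"
	"strings"
)

// trailer reproduces the list-joining switch from buildTrailer without the
// color formatting or the diag package.
func trailer(parts []string) string {
	switch {
	case len(parts) >= 3:
		return fmt.Sprintf("Found %s, and %s", strings.Join(parts[:len(parts)-1], ", "), parts[len(parts)-1])
	case len(parts) == 2:
		return fmt.Sprintf("Found %s and %s", parts[0], parts[1])
	case len(parts) == 1:
		return fmt.Sprintf("Found %s", parts[0])
	default:
		return "Validation OK!"
	}
}

func main() {
	fmt.Println(trailer([]string{"1 error", "2 warnings", "2 recommendations"}))
	fmt.Println(trailer([]string{"1 error", "1 warning"}))
	fmt.Println(trailer(nil))
}
```

The expected strings in render_test.go below ("Found 1 error, 2 warnings, and 2 recommendations", "Found 1 error and 1 warning") follow directly from this rule.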

View File

@@ -45,6 +45,19 @@ func TestRenderTextOutput(t *testing.T) {
"\n" +
"Found 1 error\n",
},
{
name: "nil bundle and 1 recommendation",
diags: diag.Diagnostics{
{
Severity: diag.Recommendation,
Summary: "recommendation",
},
},
opts: RenderOptions{RenderSummaryTable: true},
expected: "Recommendation: recommendation\n" +
"\n" +
"Found 1 recommendation\n",
},
{
name: "bundle during 'load' and 1 error",
bundle: loadingBundle,
@@ -84,7 +97,7 @@ func TestRenderTextOutput(t *testing.T) {
"Found 2 warnings\n",
},
{
-name: "bundle during 'load' and 2 errors, 1 warning with details",
+name: "bundle during 'load' and 2 errors, 1 warning and 1 recommendation with details",
bundle: loadingBundle,
diags: diag.Diagnostics{
diag.Diagnostic{
@@ -105,6 +118,12 @@ func TestRenderTextOutput(t *testing.T) {
Detail: "detail (3)",
Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Recommendation,
Summary: "recommendation (4)",
Detail: "detail (4)",
Locations: []dyn.Location{{File: "foo.py", Line: 4, Column: 1}},
},
},
opts: RenderOptions{RenderSummaryTable: true},
expected: "Error: error (1)\n" +
@@ -122,10 +141,114 @@ func TestRenderTextOutput(t *testing.T) {
"\n" +
"detail (3)\n" +
"\n" +
"Recommendation: recommendation (4)\n" +
" in foo.py:4:1\n" +
"\n" +
"detail (4)\n" +
"\n" +
"Name: test-bundle\n" + "Name: test-bundle\n" +
"Target: test-target\n" + "Target: test-target\n" +
"\n" + "\n" +
"Found 2 errors and 1 warning\n", "Found 2 errors, 1 warning, and 1 recommendation\n",
},
{
name: "bundle during 'load' and 1 error and 1 warning",
bundle: loadingBundle,
diags: diag.Diagnostics{
diag.Diagnostic{
Severity: diag.Error,
Summary: "error (1)",
Detail: "detail (1)",
Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Warning,
Summary: "warning (2)",
Detail: "detail (2)",
Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}},
},
},
opts: RenderOptions{RenderSummaryTable: true},
expected: "Error: error (1)\n" +
" in foo.py:1:1\n" +
"\n" +
"detail (1)\n" +
"\n" +
"Warning: warning (2)\n" +
" in foo.py:2:1\n" +
"\n" +
"detail (2)\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 1 error and 1 warning\n",
},
{
name: "bundle during 'load' and 1 errors, 2 warning and 2 recommendations with details",
bundle: loadingBundle,
diags: diag.Diagnostics{
diag.Diagnostic{
Severity: diag.Error,
Summary: "error (1)",
Detail: "detail (1)",
Locations: []dyn.Location{{File: "foo.py", Line: 1, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Warning,
Summary: "warning (2)",
Detail: "detail (2)",
Locations: []dyn.Location{{File: "foo.py", Line: 2, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Warning,
Summary: "warning (3)",
Detail: "detail (3)",
Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Recommendation,
Summary: "recommendation (4)",
Detail: "detail (4)",
Locations: []dyn.Location{{File: "foo.py", Line: 4, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Recommendation,
Summary: "recommendation (5)",
Detail: "detail (5)",
Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}},
},
},
opts: RenderOptions{RenderSummaryTable: true},
expected: "Error: error (1)\n" +
" in foo.py:1:1\n" +
"\n" +
"detail (1)\n" +
"\n" +
"Warning: warning (2)\n" +
" in foo.py:2:1\n" +
"\n" +
"detail (2)\n" +
"\n" +
"Warning: warning (3)\n" +
" in foo.py:3:1\n" +
"\n" +
"detail (3)\n" +
"\n" +
"Recommendation: recommendation (4)\n" +
" in foo.py:4:1\n" +
"\n" +
"detail (4)\n" +
"\n" +
"Recommendation: recommendation (5)\n" +
" in foo.py:5:1\n" +
"\n" +
"detail (5)\n" +
"\n" +
"Name: test-bundle\n" +
"Target: test-target\n" +
"\n" +
"Found 1 error, 2 warnings, and 2 recommendations\n",
},
{
name: "bundle during 'init'",
@@ -158,7 +281,7 @@ func TestRenderTextOutput(t *testing.T) {
"Validation OK!\n",
},
{
-name: "nil bundle without summary with 1 error and 1 warning",
+name: "nil bundle without summary with 1 error, 1 warning and 1 recommendation",
bundle: nil,
diags: diag.Diagnostics{
diag.Diagnostic{
@@ -173,6 +296,12 @@ func TestRenderTextOutput(t *testing.T) {
Detail: "detail (2)",
Locations: []dyn.Location{{File: "foo.py", Line: 3, Column: 1}},
},
diag.Diagnostic{
Severity: diag.Recommendation,
Summary: "recommendation (3)",
Detail: "detail (3)",
Locations: []dyn.Location{{File: "foo.py", Line: 5, Column: 1}},
},
},
opts: RenderOptions{RenderSummaryTable: false},
expected: "Error: error (1)\n" +
@@ -184,6 +313,11 @@ func TestRenderTextOutput(t *testing.T) {
" in foo.py:3:1\n" +
"\n" +
"detail (2)\n" +
"\n" +
"Recommendation: recommendation (3)\n" +
" in foo.py:5:1\n" +
"\n" +
"detail (3)\n" +
"\n", "\n",
}, },
} }
@ -304,6 +438,30 @@ func TestRenderDiagnostics(t *testing.T) {
"\n" + "\n" +
"'name' is required\n\n", "'name' is required\n\n",
}, },
{
name: "recommendation with multiple paths and locations",
diags: diag.Diagnostics{
{
Severity: diag.Recommendation,
Summary: "summary",
Detail: "detail",
Paths: []dyn.Path{
dyn.MustPathFromString("resources.jobs.xxx"),
dyn.MustPathFromString("resources.jobs.yyy"),
},
Locations: []dyn.Location{
{File: "foo.yaml", Line: 1, Column: 2},
{File: "bar.yaml", Line: 3, Column: 4},
},
},
},
expected: "Recommendation: summary\n" +
" at resources.jobs.xxx\n" +
" resources.jobs.yyy\n" +
" in foo.yaml:1:2\n" +
" bar.yaml:3:4\n\n" +
"detail\n\n",
},
}
for _, tc := range testCases {

View File

@@ -535,6 +535,10 @@
"description": "Whether Photon is enabled for this pipeline.",
"$ref": "#/$defs/bool"
},
"schema": {
"description": "The default schema (database) where tables are read from or published to. The presence of this field implies that the pipeline is in direct publishing mode.",
"$ref": "#/$defs/string"
},
"serverless": {
"description": "Whether serverless compute is enabled for this pipeline.",
"$ref": "#/$defs/bool"
@@ -2624,7 +2628,7 @@
"type": "object",
"properties": {
"no_alert_for_skipped_runs": {
-"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.",
+"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.",
"$ref": "#/$defs/bool"
},
"on_duration_warning_threshold_exceeded": {
@@ -3065,6 +3069,7 @@
"$ref": "#/$defs/map/string"
},
"pipeline_params": {
"description": "Controls whether the pipeline should perform a full refresh",
"$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/jobs.PipelineParams"
},
"python_named_params": {
@@ -3539,7 +3544,7 @@
"type": "object",
"properties": {
"no_alert_for_skipped_runs": {
-"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.",
+"description": "If true, do not send email to recipients specified in `on_failure` if the run is skipped.\nThis field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.",
"$ref": "#/$defs/bool"
},
"on_duration_warning_threshold_exceeded": {

View File

@@ -81,6 +81,7 @@ func newCreate() *cobra.Command {
cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `The description of the app.`)
// TODO: array: resources
cmd.Use = "create NAME"
cmd.Short = `Create an app.`
@@ -910,6 +911,7 @@ func newUpdate() *cobra.Command {
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `The description of the app.`)
// TODO: array: resources
cmd.Use = "update NAME"
cmd.Short = `Update an app.`

View File

@@ -53,13 +53,13 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
-*workspace.CreateCredentials,
+*workspace.CreateCredentialsRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
-var createReq workspace.CreateCredentials
+var createReq workspace.CreateCredentialsRequest
var createJson flags.JsonFlag
// TODO: short flags
@@ -79,8 +79,9 @@ func newCreate() *cobra.Command {
Arguments:
GIT_PROVIDER: Git provider. This field is case-insensitive. The available Git providers
-are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise,
-bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit.`
+are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
+gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
+awsCodeCommit.`
cmd.Annotations = make(map[string]string)
@@ -136,13 +137,13 @@ func newCreate() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var deleteOverrides []func(
*cobra.Command,
-*workspace.DeleteGitCredentialRequest,
+*workspace.DeleteCredentialsRequest,
)
func newDelete() *cobra.Command {
cmd := &cobra.Command{}
-var deleteReq workspace.DeleteGitCredentialRequest
+var deleteReq workspace.DeleteCredentialsRequest
// TODO: short flags
@@ -209,13 +210,13 @@ func newDelete() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var getOverrides []func(
*cobra.Command,
-*workspace.GetGitCredentialRequest,
+*workspace.GetCredentialsRequest,
)
func newGet() *cobra.Command {
cmd := &cobra.Command{}
-var getReq workspace.GetGitCredentialRequest
+var getReq workspace.GetCredentialsRequest
// TODO: short flags
@@ -322,33 +323,48 @@ func newList() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
-*workspace.UpdateCredentials,
+*workspace.UpdateCredentialsRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
-var updateReq workspace.UpdateCredentials
+var updateReq workspace.UpdateCredentialsRequest
var updateJson flags.JsonFlag
// TODO: short flags
cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`)
cmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`)
cmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `The username or email provided with your Git provider account, depending on which provider you are using.`)
cmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`)
-cmd.Use = "update CREDENTIAL_ID"
+cmd.Use = "update CREDENTIAL_ID GIT_PROVIDER"
cmd.Short = `Update a credential.`
cmd.Long = `Update a credential.
Updates the specified Git credential.
Arguments:
-CREDENTIAL_ID: The ID for the corresponding credential to access.`
+CREDENTIAL_ID: The ID for the corresponding credential to access.
GIT_PROVIDER: Git provider. This field is case-insensitive. The available Git providers
are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
awsCodeCommit.`
cmd.Annotations = make(map[string]string)
cmd.Args = func(cmd *cobra.Command, args []string) error {
if cmd.Flags().Changed("json") {
err := root.ExactArgs(1)(cmd, args)
if err != nil {
return fmt.Errorf("when --json flag is specified, provide only CREDENTIAL_ID as positional arguments. Provide 'git_provider' in your JSON input")
}
return nil
}
check := root.ExactArgs(2)
return check(cmd, args)
}
cmd.PreRunE = root.MustWorkspaceClient
cmd.RunE = func(cmd *cobra.Command, args []string) (err error) {
ctx := cmd.Context()
@@ -360,27 +376,13 @@ func newUpdate() *cobra.Command {
return err
}
}
if len(args) == 0 {
promptSpinner := cmdio.Spinner(ctx)
promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down."
names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx)
close(promptSpinner)
if err != nil {
return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. Original error: %w", err)
}
id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
return fmt.Errorf("expected to have the id for the corresponding credential to access")
}
_, err = fmt.Sscan(args[0], &updateReq.CredentialId)
if err != nil {
return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0])
}
if !cmd.Flags().Changed("json") {
updateReq.GitProvider = args[1]
}
err = w.GitCredentials.Update(ctx, updateReq)
if err != nil {

View File

@@ -954,6 +954,7 @@ func newUpdate() *cobra.Command {
// TODO: array: notifications
cmd.Flags().BoolVar(&updateReq.Photon, "photon", updateReq.Photon, `Whether Photon is enabled for this pipeline.`)
cmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`)
cmd.Flags().StringVar(&updateReq.Schema, "schema", updateReq.Schema, `The default schema (database) where tables are read from or published to.`)
cmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`)
cmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing checkpoints and tables.`)
cmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`)

View File

@@ -19,7 +19,7 @@ func listOverride(listCmd *cobra.Command, listReq *workspace.ListReposRequest) {
{{end}}`)
}
-func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepo) {
+func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepoRequest) {
createCmd.Use = "create URL [PROVIDER]"
createCmd.Args = func(cmd *cobra.Command, args []string) error {
// If the provider argument is not specified, we try to detect it from the URL.
@@ -95,7 +95,7 @@ func getOverride(getCmd *cobra.Command, getReq *workspace.GetRepoRequest) {
}
}
-func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepo) {
+func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepoRequest) {
updateCmd.Use = "update REPO_ID_OR_PATH"
updateJson := updateCmd.Flag("json").Value.(*flags.JsonFlag)

View File

@@ -61,13 +61,13 @@ func New() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var createOverrides []func(
*cobra.Command,
-*workspace.CreateRepo,
+*workspace.CreateRepoRequest,
)
func newCreate() *cobra.Command {
cmd := &cobra.Command{}
-var createReq workspace.CreateRepo
+var createReq workspace.CreateRepoRequest
var createJson flags.JsonFlag
// TODO: short flags
@@ -87,8 +87,9 @@ func newCreate() *cobra.Command {
Arguments:
URL: URL of the Git repository to be linked.
PROVIDER: Git provider. This field is case-insensitive. The available Git providers
-are gitHub, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise,
-bitbucketServer, gitLabEnterpriseEdition and awsCodeCommit.`
+are gitHub, bitbucketCloud, gitLab, azureDevOpsServices,
+gitHubEnterprise, bitbucketServer, gitLabEnterpriseEdition and
+awsCodeCommit.`
cmd.Annotations = make(map[string]string)
@@ -164,7 +165,7 @@ func newDelete() *cobra.Command {
Deletes the specified repo.
Arguments:
-REPO_ID: The ID for the corresponding repo to access.`
+REPO_ID: ID of the Git folder (repo) object in the workspace.`
cmd.Annotations = make(map[string]string)
@@ -181,14 +182,14 @@ func newDelete() *cobra.Command {
if err != nil {
return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err)
}
-id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access")
+id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
-return fmt.Errorf("expected to have the id for the corresponding repo to access")
+return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace")
}
_, err = fmt.Sscan(args[0], &deleteReq.RepoId)
if err != nil {
@@ -237,7 +238,7 @@ func newGet() *cobra.Command {
Returns the repo with the given repo ID.
Arguments:
-REPO_ID: The ID for the corresponding repo to access.`
+REPO_ID: ID of the Git folder (repo) object in the workspace.`
cmd.Annotations = make(map[string]string)
@@ -254,14 +255,14 @@ func newGet() *cobra.Command {
if err != nil {
return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err)
}
-id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access")
+id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
-return fmt.Errorf("expected to have the id for the corresponding repo to access")
+return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace")
}
_, err = fmt.Sscan(args[0], &getReq.RepoId)
if err != nil {
@@ -451,8 +452,8 @@ func newList() *cobra.Command {
cmd.Short = `Get repos.`
cmd.Long = `Get repos.
-Returns repos that the calling user has Manage permissions on. Results are
-paginated with each page containing twenty repos.`
+Returns repos that the calling user has Manage permissions on. Use
+next_page_token to iterate through additional pages.`
cmd.Annotations = make(map[string]string)
@@ -569,13 +570,13 @@ func newSetPermissions() *cobra.Command {
// Functions can be added from the `init()` function in manually curated files in this directory.
var updateOverrides []func(
*cobra.Command,
-*workspace.UpdateRepo,
+*workspace.UpdateRepoRequest,
)
func newUpdate() *cobra.Command {
cmd := &cobra.Command{}
-var updateReq workspace.UpdateRepo
+var updateReq workspace.UpdateRepoRequest
var updateJson flags.JsonFlag
// TODO: short flags
@@ -593,7 +594,7 @@ func newUpdate() *cobra.Command {
latest commit on the same branch.
Arguments:
-REPO_ID: The ID for the corresponding repo to access.`
+REPO_ID: ID of the Git folder (repo) object in the workspace.`
cmd.Annotations = make(map[string]string)
@@ -616,14 +617,14 @@ func newUpdate() *cobra.Command {
if err != nil {
return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err)
}
-id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access")
+id, err := cmdio.Select(ctx, names, "ID of the Git folder (repo) object in the workspace")
if err != nil {
return err
}
args = append(args, id)
}
if len(args) != 1 {
-return fmt.Errorf("expected to have the id for the corresponding repo to access")
+return fmt.Errorf("expected to have id of the git folder (repo) object in the workspace")
}
_, err = fmt.Sscan(args[0], &updateReq.RepoId)
if err != nil {

go.mod (8 changed lines)
View File

@@ -7,7 +7,7 @@ toolchain go1.22.7
require (
github.com/Masterminds/semver/v3 v3.3.0 // MIT
github.com/briandowns/spinner v1.23.1 // Apache 2.0
-github.com/databricks/databricks-sdk-go v0.47.0 // Apache 2.0
+github.com/databricks/databricks-sdk-go v0.48.0 // Apache 2.0
github.com/fatih/color v1.17.0 // MIT
github.com/ghodss/yaml v1.0.0 // MIT + NOTICE
github.com/google/uuid v1.6.0 // BSD-3-Clause
@@ -27,8 +27,8 @@ require (
golang.org/x/mod v0.21.0
golang.org/x/oauth2 v0.23.0
golang.org/x/sync v0.8.0
-golang.org/x/term v0.24.0
-golang.org/x/text v0.18.0
+golang.org/x/term v0.25.0
+golang.org/x/text v0.19.0
gopkg.in/ini.v1 v1.67.0 // Apache 2.0
gopkg.in/yaml.v3 v3.0.1
)
@@ -64,7 +64,7 @@ require (
go.opentelemetry.io/otel/trace v1.24.0 // indirect
golang.org/x/crypto v0.24.0 // indirect
golang.org/x/net v0.26.0 // indirect
-golang.org/x/sys v0.25.0 // indirect
+golang.org/x/sys v0.26.0 // indirect
golang.org/x/time v0.5.0 // indirect
google.golang.org/api v0.182.0 // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect

go.sum (generated, 16 changed lines)
View File

@@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX
github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg=
github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4=
-github.com/databricks/databricks-sdk-go v0.47.0 h1:eE7dN9axviL8+s10jnQAayOYDaR+Mfu7E9COGjO4lrQ=
-github.com/databricks/databricks-sdk-go v0.47.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
+github.com/databricks/databricks-sdk-go v0.48.0 h1:46KtsnRo+FGhC3izUXbpL0PXBNomvsdignYDhJZlm9s=
+github.com/databricks/databricks-sdk-go v0.48.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -212,14 +212,14 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
-golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
-golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
+golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo=
+golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24=
+golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
-golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM=
+golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk=
golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

View File

@@ -519,7 +519,7 @@ func TemporaryRepo(t *testing.T, w *databricks.WorkspaceClient) string {
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("integration-test-repo-"))
t.Logf("Creating repo:%s", repoPath)
-repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepo{
+repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{
Url: "https://github.com/databricks/cli",
Provider: "github",
Path: repoPath,

View File

@@ -29,7 +29,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr
assert.NoError(t, err)
remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName(projectNamePrefix))
-repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepo{
+repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
Path: remoteProjectRoot,
Url: EmptyRepoUrl,
Provider: "gitHub",

View File

@@ -34,7 +34,7 @@ func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ct
func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) (int64, string) {
repoPath := synthesizeTemporaryRepoPath(t, w, ctx)
-repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepo{
+repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{
Path: repoPath,
Url: repoUrl,
Provider: "gitHub",

View File

@@ -38,7 +38,7 @@ func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Contex
require.NoError(t, err)
repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-sync-integration-"))
-repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepo{
+repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{
Path: repoPath,
Url: repoUrl,
Provider: "gitHub",

View File

@@ -6,4 +6,5 @@ const (
Error Severity = iota
Warning
Info
Recommendation
)
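With Recommendation added to the Severity constants, a diagnostic of that severity can be constructed and filtered like any other. A small sketch (assuming the libs/diag package as it appears in this commit) of how the renderer counts them:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/diag"
)

func main() {
	diags := diag.Diagnostics{
		{Severity: diag.Warning, Summary: "a warning"},
		{Severity: diag.Recommendation, Summary: "define a single job in a file with the .job.yml extension."},
	}
	// buildTrailer uses the same Filter call to count recommendations.
	fmt.Println(len(diags.Filter(diag.Recommendation)))
}
```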