Enable gofumpt and goimports in golangci-lint (#1999)

## Changes
Enable gofumpt and goimports in golangci-lint and apply autofix.

This makes 'make fmt' redundant; it will be cleaned up in a follow-up diff.
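For orientation, below is a small, hypothetical Go snippet (not a file from this repository) illustrating the kinds of rewrites the autofix (applied here, e.g. via `golangci-lint run --fix`) makes throughout this diff: empty struct bodies collapsed onto one line, adjacent constant declarations grouped into a block, and octal file-mode literals written with the `0o` prefix.

```go
// Hypothetical example; identifiers are illustrative only.
package example

import "os"

// Before: type noop struct {
//         }
// After: empty struct bodies are collapsed onto one line.
type noop struct{}

// Before: const Paused = "PAUSED"
//         const Unpaused = "UNPAUSED"
// After: adjacent constant declarations are grouped.
const (
	Paused   = "PAUSED"
	Unpaused = "UNPAUSED"
)

// Before: os.WriteFile(path, data, 0600)
// After: octal literals use the 0o prefix.
func write(path string, data []byte) error {
	return os.WriteFile(path, data, 0o600)
}
```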

## Tests
Existing tests.
Denis Bilenko 2024-12-12 10:28:42 +01:00 committed by GitHub
parent 592474880d
commit 2e018cfaec
197 changed files with 554 additions and 565 deletions

View File

@@ -9,6 +9,8 @@ linters:
    - staticcheck
    - unused
    - gofmt
+    - gofumpt
+    - goimports
 linters-settings:
   govet:
     enable-all: true
@@ -27,5 +29,10 @@ linters-settings:
      - (*github.com/spf13/cobra.Command).MarkFlagRequired
      - (*github.com/spf13/pflag.FlagSet).MarkDeprecated
      - (*github.com/spf13/pflag.FlagSet).MarkHidden
+  gofumpt:
+    module-path: github.com/databricks/cli
+    extra-rules: true
+  #goimports:
+  #  local-prefixes: github.com/databricks/cli
 issues:
   exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/

View File

@@ -3,7 +3,6 @@ package artifacts
 import (
 	"context"
 	"fmt"
-
 	"slices"
 
 	"github.com/databricks/cli/bundle"

View File

@@ -13,8 +13,7 @@ func DetectPackages() bundle.Mutator {
 	return &autodetect{}
 }
 
-type autodetect struct {
-}
+type autodetect struct{}
 
 func (m *autodetect) Name() string {
 	return "artifacts.DetectPackages"

View File

@@ -96,7 +96,6 @@ func (m *expandGlobs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost
 		// Set the expanded globs back into the configuration.
 		return dyn.SetByPath(v, base, dyn.V(output))
 	})
-
 	if err != nil {
 		return diag.FromErr(err)
 	}

View File

@@ -15,8 +15,7 @@ import (
 	"github.com/databricks/cli/libs/log"
 )
 
-type detectPkg struct {
-}
+type detectPkg struct{}
 
 func DetectPackage() bundle.Mutator {
 	return &detectPkg{}

View File

@@ -186,7 +186,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error)
 	// Make directory if it doesn't exist yet.
 	dir := filepath.Join(parts...)
-	err := os.MkdirAll(dir, 0700)
+	err := os.MkdirAll(dir, 0o700)
 	if err != nil {
 		return "", err
 	}
@@ -203,7 +203,7 @@ func (b *Bundle) InternalDir(ctx context.Context) (string, error) {
 	}
 
 	dir := filepath.Join(cacheDir, internalFolder)
-	err = os.MkdirAll(dir, 0700)
+	err = os.MkdirAll(dir, 0o700)
 	if err != nil {
 		return dir, err
 	}

View File

@@ -47,8 +47,10 @@ type PyDABs struct {
 	Import []string `json:"import,omitempty"`
 }
 
-type Command string
-type ScriptHook string
+type (
+	Command    string
+	ScriptHook string
+)
 
 // These hook names are subject to change and currently experimental
 const (

View File

@@ -6,8 +6,10 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 )
 
-var jobOrder = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"})
-var taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"})
+var (
+	jobOrder  = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"})
+	taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"})
+)
 
 func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) {
 	value := make(map[string]dyn.Value)

View File

@@ -27,7 +27,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag.
 	var out []bundle.Mutator
 
 	// Map with files we've already seen to avoid loading them twice.
-	var seen = map[string]bool{}
+	seen := map[string]bool{}
 
 	for _, file := range config.FileNames {
 		seen[file] = true

View File

@@ -481,5 +481,4 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) {
 			require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment)
 		})
 	}
-
 }

View File

@@ -42,7 +42,6 @@ func rewriteComputeIdToClusterId(v dyn.Value, p dyn.Path) (dyn.Value, diag.Diagn
 	var diags diag.Diagnostics
 	computeIdPath := p.Append(dyn.Key("compute_id"))
 	computeId, err := dyn.GetByPath(v, computeIdPath)
-
 	// If the "compute_id" key is not set, we don't need to do anything.
 	if err != nil {
 		return v, nil

View File

@@ -17,7 +17,7 @@ import (
 )
 
 func touchEmptyFile(t *testing.T, path string) {
-	err := os.MkdirAll(filepath.Dir(path), 0700)
+	err := os.MkdirAll(filepath.Dir(path), 0o700)
 	require.NoError(t, err)
 	f, err := os.Create(path)
 	require.NoError(t, err)

View File

@@ -10,8 +10,7 @@ import (
 	"github.com/databricks/cli/libs/diag"
 )
 
-type initializeURLs struct {
-}
+type initializeURLs struct{}
 
 // InitializeURLs makes sure the URL field of each resource is configured.
 // NOTE: since this depends on an extra API call, this mutator adds some extra
@@ -39,7 +38,7 @@ func (m *initializeURLs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn
 	return nil
 }
 
-func initializeForWorkspace(b *bundle.Bundle, orgId string, host string) error {
+func initializeForWorkspace(b *bundle.Bundle, orgId, host string) error {
 	baseURL, err := url.Parse(host)
 	if err != nil {
 		return err

View File

@@ -23,7 +23,7 @@ func (m *overrideCompute) Name() string {
 
 func overrideJobCompute(j *resources.Job, compute string) {
 	for i := range j.Tasks {
-		var task = &j.Tasks[i]
+		task := &j.Tasks[i]
 		if task.ForEachTask != nil {
 			task = &task.ForEachTask.Task

View File

@@ -95,7 +95,7 @@ func jobRewritePatterns() []jobRewritePattern {
 // VisitJobPaths visits all paths in job resources and applies a function to each path.
 func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) {
 	var err error
-	var newValue = value
+	newValue := value
 
 	for _, rewritePattern := range jobRewritePatterns() {
 		newValue, err = dyn.MapByPattern(newValue, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
@@ -105,7 +105,6 @@ func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) {
 			return fn(p, rewritePattern.kind, v)
 		})
-
 		if err != nil {
 			return dyn.InvalidValue, err
 		}

View File

@@ -57,14 +57,12 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
 				return dyn.NewValue(fmt.Sprintf("/Workspace%s", path), v.Locations()), nil
 			})
-
 			if err != nil {
 				return dyn.InvalidValue, err
 			}
 		}
 
 		return v, nil
 	})
-
 	if err != nil {
 		return diag.FromErr(err)
 	}

View File

@@ -30,7 +30,6 @@ type parsePythonDiagnosticsTest struct {
 }
 
 func TestParsePythonDiagnostics(t *testing.T) {
-
 	testCases := []parsePythonDiagnosticsTest{
 		{
 			name: "short error with location",

View File

@@ -9,12 +9,11 @@ import (
 	"io"
 	"os"
 	"path/filepath"
+	"strings"
 
 	"github.com/databricks/databricks-sdk-go/logger"
 	"github.com/fatih/color"
 
-	"strings"
-
 	"github.com/databricks/cli/libs/python"
 
 	"github.com/databricks/cli/bundle/env"
@@ -94,11 +93,10 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno
 
 	// mutateDiags is used because Mutate returns 'error' instead of 'diag.Diagnostics'
 	var mutateDiags diag.Diagnostics
-	var mutateDiagsHasError = errors.New("unexpected error")
+	mutateDiagsHasError := errors.New("unexpected error")
 
 	err := b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) {
 		pythonPath, err := detectExecutable(ctx, experimental.PyDABs.VEnvPath)
-
 		if err != nil {
 			return dyn.InvalidValue, fmt.Errorf("failed to get Python interpreter path: %w", err)
 		}
@@ -141,7 +139,7 @@ func createCacheDir(ctx context.Context) (string, error) {
 		// use 'default' as target name
 		cacheDir := filepath.Join(tempDir, "default", "pydabs")
 
-		err := os.MkdirAll(cacheDir, 0700)
+		err := os.MkdirAll(cacheDir, 0o700)
 		if err != nil {
 			return "", err
 		}
@@ -152,7 +150,7 @@ func createCacheDir(ctx context.Context) (string, error) {
 	return os.MkdirTemp("", "-pydabs")
 }
 
-func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, rootPath string, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
+func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) {
 	inputPath := filepath.Join(cacheDir, "input.json")
 	outputPath := filepath.Join(cacheDir, "output.json")
 	diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json")
@@ -263,10 +261,10 @@ func writeInputFile(inputPath string, input dyn.Value) error {
 		return fmt.Errorf("failed to marshal input: %w", err)
 	}
 
-	return os.WriteFile(inputPath, rootConfigJson, 0600)
+	return os.WriteFile(inputPath, rootConfigJson, 0o600)
 }
 
-func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) {
+func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) {
 	outputFile, err := os.Open(outputPath)
 	if err != nil {
 		return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err))
@@ -381,7 +379,7 @@ func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
 			return right, nil
 		},
-		VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
+		VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) {
 			return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
 		},
 	}
@@ -430,7 +428,7 @@ func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor {
 			return right, nil
 		},
-		VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) {
+		VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) {
 			if !valuePath.HasPrefix(jobsPath) {
 				return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String())
 			}

View File

@@ -106,7 +106,6 @@ func TestPythonMutator_load(t *testing.T) {
 			Column: 5,
 		},
 	}, diags[0].Locations)
-
 }
 
 func TestPythonMutator_load_disallowed(t *testing.T) {
@@ -588,7 +587,7 @@ or activate the environment before running CLI commands:
 	assert.Equal(t, expected, out)
 }
 
-func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context {
+func withProcessStub(t *testing.T, args []string, output, diagnostics string) context.Context {
 	ctx := context.Background()
 	ctx, stub := process.WithStub(ctx)
 
@@ -611,10 +610,10 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str
 		assert.NoError(t, err)
 
 		if reflect.DeepEqual(actual.Args, args) {
-			err := os.WriteFile(outputPath, []byte(output), 0600)
+			err := os.WriteFile(outputPath, []byte(output), 0o600)
 			require.NoError(t, err)
-			err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0600)
+			err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0o600)
 			require.NoError(t, err)
 			return nil
@@ -626,7 +625,7 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str
 	return ctx
 }
 
-func loadYaml(name string, content string) *bundle.Bundle {
+func loadYaml(name, content string) *bundle.Bundle {
 	v, diag := config.LoadFromBytes(name, []byte(content))
 
 	if diag.Error() != nil {
@@ -650,17 +649,17 @@ func withFakeVEnv(t *testing.T, venvPath string) {
 	interpreterPath := interpreterPath(venvPath)
 
-	err = os.MkdirAll(filepath.Dir(interpreterPath), 0755)
+	err = os.MkdirAll(filepath.Dir(interpreterPath), 0o755)
 	if err != nil {
 		panic(err)
 	}
 
-	err = os.WriteFile(interpreterPath, []byte(""), 0755)
+	err = os.WriteFile(interpreterPath, []byte(""), 0o755)
 	if err != nil {
 		panic(err)
 	}
 
-	err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0755)
+	err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0o755)
 	if err != nil {
 		panic(err)
 	}

View File

@@ -32,7 +32,8 @@ func ResolveVariableReferencesInLookup() bundle.Mutator {
 }
 
 func ResolveVariableReferencesInComplexVariables() bundle.Mutator {
-	return &resolveVariableReferences{prefixes: []string{
+	return &resolveVariableReferences{
+		prefixes: []string{
 			"bundle",
 			"workspace",
 			"variables",
@@ -173,7 +174,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
 				return dyn.InvalidValue, dynvar.ErrSkipResolution
 			})
 		})
-
 		if err != nil {
 			return dyn.InvalidValue, err
 		}
@@ -184,7 +184,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle)
 		diags = diags.Extend(normaliseDiags)
 		return root, nil
 	})
-
 	if err != nil {
 		diags = diags.Extend(diag.FromErr(err))
 	}

View File

@@ -63,7 +63,6 @@ func (m *rewriteWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di
 			return v, nil
 		})
 	})
-
 	if err != nil {
 		return diag.FromErr(err)
 	}

View File

@@ -81,5 +81,4 @@ func TestNoWorkspacePrefixUsed(t *testing.T) {
 	require.Equal(t, "${workspace.artifact_path}/jar1.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[1].Libraries[0].Jar)
 	require.Equal(t, "${workspace.file_path}/notebook2", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].NotebookTask.NotebookPath)
 	require.Equal(t, "${workspace.artifact_path}/jar2.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].Libraries[0].Jar)
-
 }

View File

@@ -12,8 +12,7 @@ import (
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 )
 
-type setRunAs struct {
-}
+type setRunAs struct{}
 
 // This mutator does two things:
 //
@@ -30,7 +29,7 @@ func (m *setRunAs) Name() string {
 	return "SetRunAs"
 }
 
-func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser string, runAsUser string) diag.Diagnostics {
+func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser, runAsUser string) diag.Diagnostics {
 	return diag.Diagnostics{{
 		Summary: fmt.Sprintf("%s do not support a setting a run_as user that is different from the owner.\n"+
 			"Current identity: %s. Run as identity: %s.\n"+

View File

@@ -65,7 +65,6 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable,
 
 	// We should have had a value to set for the variable at this point.
 	return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name)
-
 }
 
 func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {

View File

@@ -35,7 +35,7 @@ func (m *syncInferRoot) Name() string {
 // If the path does not exist, it returns an empty string.
 //
 // See "sync_infer_root_internal_test.go" for examples.
-func (m *syncInferRoot) computeRoot(path string, root string) string {
+func (m *syncInferRoot) computeRoot(path, root string) string {
 	for !filepath.IsLocal(path) {
 		// Break if we have reached the root of the filesystem.
 		dir := filepath.Dir(root)

View File

@@ -275,8 +275,8 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos
 }
 
 func gatherFallbackPaths(v dyn.Value, typ string) (map[string]string, error) {
-	var fallback = make(map[string]string)
-	var pattern = dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey())
+	fallback := make(map[string]string)
+	pattern := dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey())
 
 	// Previous behavior was to use a resource's location as the base path to resolve
 	// relative paths in its definition. With the introduction of [dyn.Value] throughout,

View File

@@ -34,7 +34,7 @@ func touchNotebookFile(t *testing.T, path string) {
 }
 
 func touchEmptyFile(t *testing.T, path string) {
-	err := os.MkdirAll(filepath.Dir(path), 0700)
+	err := os.MkdirAll(filepath.Dir(path), 0o700)
 	require.NoError(t, err)
 	f, err := os.Create(path)
 	require.NoError(t, err)

View File

@@ -15,8 +15,7 @@ func VerifyCliVersion() bundle.Mutator {
 	return &verifyCliVersion{}
 }
 
-type verifyCliVersion struct {
-}
+type verifyCliVersion struct{}
 
 func (v *verifyCliVersion) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
 	// No constraints specified, skip the check.

View File

@@ -1,7 +1,9 @@
 package config
 
-const Paused = "PAUSED"
-const Unpaused = "UNPAUSED"
+const (
+	Paused   = "PAUSED"
+	Unpaused = "UNPAUSED"
+)
 
 type Presets struct {
 	// NamePrefix to prepend to all resource names.

View File

@@ -168,7 +168,6 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) {
 		"key1": "value1",
 	}, root.Variables["complex"].Default)
 	assert.Equal(t, "complex var", root.Variables["complex"].Description)
-
 }
 
 func TestIsFullVariableOverrideDef(t *testing.T) {
@@ -252,5 +251,4 @@ func TestIsFullVariableOverrideDef(t *testing.T) {
 	for i, tc := range testCases {
 		assert.Equal(t, tc.expected, isFullVariableOverrideDef(tc.value), "test case %d", i)
 	}
-
 }

View File

@@ -13,8 +13,7 @@ func FilesToSync() bundle.ReadOnlyMutator {
 	return &filesToSync{}
 }
 
-type filesToSync struct {
-}
+type filesToSync struct{}
 
 func (v *filesToSync) Name() string {
 	return "validate:files_to_sync"

View File

@@ -15,8 +15,7 @@ import (
 	"golang.org/x/sync/errgroup"
 )
 
-type folderPermissions struct {
-}
+type folderPermissions struct{}
 
 // Apply implements bundle.ReadOnlyMutator.
 func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) diag.Diagnostics {

View File

@@ -13,8 +13,7 @@ func JobClusterKeyDefined() bundle.ReadOnlyMutator {
 	return &jobClusterKeyDefined{}
 }
 
-type jobClusterKeyDefined struct {
-}
+type jobClusterKeyDefined struct{}
 
 func (v *jobClusterKeyDefined) Name() string {
 	return "validate:job_cluster_key_defined"

View File

@@ -17,8 +17,7 @@ func JobTaskClusterSpec() bundle.ReadOnlyMutator {
 	return &jobTaskClusterSpec{}
 }
 
-type jobTaskClusterSpec struct {
-}
+type jobTaskClusterSpec struct{}
 
 func (v *jobTaskClusterSpec) Name() string {
 	return "validate:job_task_cluster_spec"

View File

@@ -175,7 +175,6 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) {
 					Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.foo.job_clusters[0].new_cluster")},
 				},
 			}, diags)
-
 		})
 	}
 }

View File

@@ -8,8 +8,7 @@ import (
 	"github.com/databricks/cli/libs/dyn"
 )
 
-type validate struct {
-}
+type validate struct{}
 
 type location struct {
 	path string

View File

@@ -17,8 +17,7 @@ func ValidateSyncPatterns() bundle.ReadOnlyMutator {
 	return &validateSyncPatterns{}
 }
 
-type validateSyncPatterns struct {
-}
+type validateSyncPatterns struct{}
 
 func (v *validateSyncPatterns) Name() string {
 	return "validate:validate_sync_patterns"

View File

@@ -42,7 +42,6 @@ func TestLookup_Empty(t *testing.T) {
 
 	// No string representation for an invalid lookup
 	assert.Empty(t, lookup.String())
-
 }
 
 func TestLookup_Multiple(t *testing.T) {

View File

@@ -20,7 +20,6 @@ func (l resolveCluster) Resolve(ctx context.Context, w *databricks.WorkspaceClie
 			ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi},
 		},
 	})
-
 	if err != nil {
 		return "", err
 	}

View File

@@ -15,7 +15,7 @@ func (d *DeferredMutator) Name() string {
 	return "deferred"
 }
 
-func Defer(mutator Mutator, finally Mutator) Mutator {
+func Defer(mutator, finally Mutator) Mutator {
 	return &DeferredMutator{
 		mutator: mutator,
 		finally: finally,

View File

@@ -15,8 +15,10 @@ import (
 	"github.com/google/uuid"
 )
 
-const DeploymentStateFileName = "deployment.json"
-const DeploymentStateVersion = 1
+const (
+	DeploymentStateFileName = "deployment.json"
+	DeploymentStateVersion  = 1
+)
 
 type File struct {
 	LocalPath string `json:"local_path"`
@@ -132,7 +134,7 @@ func (f Filelist) ToSlice(root vfs.Path) []fileset.File {
 	return files
 }
 
-func isLocalStateStale(local io.Reader, remote io.Reader) bool {
+func isLocalStateStale(local, remote io.Reader) bool {
 	localState, err := loadState(local)
 	if err != nil {
 		return true

View File

@@ -44,7 +44,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
 		return diag.FromErr(err)
 	}
 
-	local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600)
+	local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0o600)
 	if err != nil {
 		return diag.FromErr(err)
 	}

View File

@@ -99,7 +99,7 @@ func testStatePull(t *testing.T, opts statePullOpts) {
 		snapshotPath, err := sync.SnapshotPath(opts)
 		require.NoError(t, err)
 
-		err = os.WriteFile(snapshotPath, []byte("snapshot"), 0644)
+		err = os.WriteFile(snapshotPath, []byte("snapshot"), 0o644)
 		require.NoError(t, err)
 	}
 
@@ -110,7 +110,7 @@ func testStatePull(t *testing.T, opts statePullOpts) {
 		data, err := json.Marshal(opts.localState)
 		require.NoError(t, err)
 
-		err = os.WriteFile(statePath, data, 0644)
+		err = os.WriteFile(statePath, data, 0o644)
 		require.NoError(t, err)
 	}

View File

@@ -74,7 +74,7 @@ func TestStatePush(t *testing.T) {
 	data, err := json.Marshal(state)
 	require.NoError(t, err)
 
-	err = os.WriteFile(statePath, data, 0644)
+	err = os.WriteFile(statePath, data, 0o644)
 	require.NoError(t, err)
 
 	diags := bundle.Apply(ctx, b, s)

View File

@@ -17,8 +17,7 @@ import (
 	"github.com/google/uuid"
 )
 
-type stateUpdate struct {
-}
+type stateUpdate struct{}
 
 func (s *stateUpdate) Name() string {
 	return "deploy:state-update"
@@ -57,7 +56,7 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost
 		return diag.FromErr(err)
 	}
 	// Write the state back to the file.
-	f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600)
+	f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0o600)
 	if err != nil {
 		log.Infof(ctx, "Unable to open deployment state file: %s", err)
 		return diag.FromErr(err)

View File

@@ -119,7 +119,7 @@ func TestStateUpdateWithExistingState(t *testing.T) {
 	data, err := json.Marshal(state)
 	require.NoError(t, err)
 
-	err = os.WriteFile(statePath, data, 0644)
+	err = os.WriteFile(statePath, data, 0o644)
 	require.NoError(t, err)
 
 	diags := bundle.Apply(ctx, b, s)

View File

@@ -42,8 +42,7 @@ func collectDashboardsFromState(ctx context.Context, b *bundle.Bundle) ([]dashbo
 	return dashboards, nil
 }
 
-type checkDashboardsModifiedRemotely struct {
-}
+type checkDashboardsModifiedRemotely struct{}
 
 func (l *checkDashboardsModifiedRemotely) Name() string {
 	return "CheckDashboardsModifiedRemotely"

View File

@@ -23,8 +23,7 @@ func (e ErrResourceIsRunning) Error() string {
 	return fmt.Sprintf("%s %s is running", e.resourceType, e.resourceId)
 }
 
-type checkRunningResources struct {
-}
+type checkRunningResources struct{}
 
 func (l *checkRunningResources) Name() string {
 	return "check-running-resources"

View File

@@ -43,7 +43,7 @@ func convertToResourceStruct[T any](t *testing.T, resource *T, data any) {
 }
 
 func TestBundleToTerraformJob(t *testing.T) {
-	var src = resources.Job{
+	src := resources.Job{
 		JobSettings: &jobs.JobSettings{
 			Name: "my job",
 			JobClusters: []jobs.JobCluster{
@@ -71,7 +71,7 @@ func TestBundleToTerraformJob(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"my_job": &src,
@@ -93,7 +93,7 @@ func TestBundleToTerraformJob(t *testing.T) {
 }
 
 func TestBundleToTerraformJobPermissions(t *testing.T) {
-	var src = resources.Job{
+	src := resources.Job{
 		Permissions: []resources.Permission{
 			{
 				Level: "CAN_VIEW",
@@ -102,7 +102,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"my_job": &src,
@@ -121,7 +121,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) {
 }
 
 func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
-	var src = resources.Job{
+	src := resources.Job{
 		JobSettings: &jobs.JobSettings{
 			Name: "my job",
 			Tasks: []jobs.Task{
@@ -139,7 +139,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"my_job": &src,
@@ -158,7 +158,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) {
 }
 
 func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
-	var src = resources.Job{
+	src := resources.Job{
 		JobSettings: &jobs.JobSettings{
 			Name: "my job",
 			Tasks: []jobs.Task{
@@ -182,7 +182,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"my_job": &src,
@@ -201,7 +201,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) {
 }
 
 func TestBundleToTerraformPipeline(t *testing.T) {
-	var src = resources.Pipeline{
+	src := resources.Pipeline{
 		PipelineSpec: &pipelines.PipelineSpec{
 			Name: "my pipeline",
 			Libraries: []pipelines.PipelineLibrary{
@@ -239,7 +239,7 @@ func TestBundleToTerraformPipeline(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Pipelines: map[string]*resources.Pipeline{
 				"my_pipeline": &src,
@@ -262,7 +262,7 @@ func TestBundleToTerraformPipeline(t *testing.T) {
 }
 
 func TestBundleToTerraformPipelinePermissions(t *testing.T) {
-	var src = resources.Pipeline{
+	src := resources.Pipeline{
 		Permissions: []resources.Permission{
 			{
 				Level: "CAN_VIEW",
@@ -271,7 +271,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Pipelines: map[string]*resources.Pipeline{
 				"my_pipeline": &src,
@@ -290,7 +290,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) {
 }
 
 func TestBundleToTerraformModel(t *testing.T) {
-	var src = resources.MlflowModel{
+	src := resources.MlflowModel{
 		Model: &ml.Model{
 			Name: "name",
 			Description: "description",
@@ -307,7 +307,7 @@ func TestBundleToTerraformModel(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Models: map[string]*resources.MlflowModel{
 				"my_model": &src,
@@ -330,7 +330,7 @@ func TestBundleToTerraformModel(t *testing.T) {
 }
 
 func TestBundleToTerraformModelPermissions(t *testing.T) {
-	var src = resources.MlflowModel{
+	src := resources.MlflowModel{
 		Model: &ml.Model{
 			Name: "name",
 		},
@@ -342,7 +342,7 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Models: map[string]*resources.MlflowModel{
 				"my_model": &src,
@@ -361,13 +361,13 @@ func TestBundleToTerraformModelPermissions(t *testing.T) {
 }
 
 func TestBundleToTerraformExperiment(t *testing.T) {
-	var src = resources.MlflowExperiment{
+	src := resources.MlflowExperiment{
 		Experiment: &ml.Experiment{
 			Name: "name",
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Experiments: map[string]*resources.MlflowExperiment{
 				"my_experiment": &src,
@@ -384,7 +384,7 @@ func TestBundleToTerraformExperiment(t *testing.T) {
 }
 
 func TestBundleToTerraformExperimentPermissions(t *testing.T) {
-	var src = resources.MlflowExperiment{
+	src := resources.MlflowExperiment{
 		Experiment: &ml.Experiment{
 			Name: "name",
 		},
@@ -396,7 +396,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Experiments: map[string]*resources.MlflowExperiment{
 				"my_experiment": &src,
@@ -415,7 +415,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) {
 }
 
 func TestBundleToTerraformModelServing(t *testing.T) {
-	var src = resources.ModelServingEndpoint{
+	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
 			Config: serving.EndpointCoreConfigInput{
@@ -439,7 +439,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
 				"my_model_serving_endpoint": &src,
@@ -462,7 +462,7 @@ func TestBundleToTerraformModelServing(t *testing.T) {
 }
 
 func TestBundleToTerraformModelServingPermissions(t *testing.T) {
-	var src = resources.ModelServingEndpoint{
+	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
 
@@ -492,7 +492,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{
 				"my_model_serving_endpoint": &src,
@@ -511,7 +511,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) {
 }
 
 func TestBundleToTerraformRegisteredModel(t *testing.T) {
-	var src = resources.RegisteredModel{
+	src := resources.RegisteredModel{
 		CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
 			Name: "name",
 			CatalogName: "catalog",
@@ -520,7 +520,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			RegisteredModels: map[string]*resources.RegisteredModel{
 				"my_registered_model": &src,
@@ -540,7 +540,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) {
 }
 
 func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
-	var src = resources.RegisteredModel{
+	src := resources.RegisteredModel{
 		CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
 			Name: "name",
 			CatalogName: "catalog",
@@ -554,7 +554,7 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
 		},
 	}
 
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			RegisteredModels: map[string]*resources.RegisteredModel{
 				"my_registered_model": &src,
@@ -573,14 +573,14 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) {
 }
 
 func TestBundleToTerraformDeletedResources(t *testing.T) {
-	var job1 = resources.Job{
+	job1 := resources.Job{
 		JobSettings: &jobs.JobSettings{},
 	}
-	var job2 = resources.Job{
+	job2 := resources.Job{
 		ModifiedStatus: resources.ModifiedStatusDeleted,
 		JobSettings: &jobs.JobSettings{},
 	}
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"my_job1": &job1,
@@ -601,10 +601,10 @@ func TestBundleToTerraformDeletedResources(t *testing.T) {
 }
 
 func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{},
 	}
-	var tfState = resourcesState{
+	tfState := resourcesState{
 		Resources: []stateResource{
 			{
 				Type: "databricks_job",
@@ -736,7 +736,7 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) {
 }
 
 func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"test_job": {
@@ -817,7 +817,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
 			},
 		},
 	}
-	var tfState = resourcesState{
+	tfState := resourcesState{
 		Resources: nil,
 	}
 	err := TerraformToBundle(&tfState, &config)
@@ -860,7 +860,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) {
 }
 
 func TestTerraformToBundleModifiedResources(t *testing.T) {
-	var config = config.Root{
+	config := config.Root{
 		Resources: config.Resources{
 			Jobs: map[string]*resources.Job{
 				"test_job": {
@@ -996,7 +996,7 @@ func TestTerraformToBundleModifiedResources(t *testing.T) {
 			},
 		},
 	}
-	var tfState = resourcesState{
+	tfState := resourcesState{
 		Resources: []stateResource{
 			{
 				Type: "databricks_job",

View File

@@ -145,7 +145,7 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error {
 
 // This function is used for env vars set by the Databricks VSCode extension. The variables are intended to be used by the CLI
 // bundled with the Databricks VSCode extension, but users can use different CLI versions in the VSCode terminals, in which case we want to ignore
 // the variables if that CLI uses different versions of the dependencies.
-func getEnvVarWithMatchingVersion(ctx context.Context, envVarName string, versionVarName string, currentVersion string) (string, error) {
+func getEnvVarWithMatchingVersion(ctx context.Context, envVarName, versionVarName, currentVersion string) (string, error) {
 	envValue := env.Get(ctx, envVarName)
 	versionValue := env.Get(ctx, versionVarName)

View File

@@ -400,7 +400,7 @@ func TestFindExecPathFromEnvironmentWithCorrectVersionAndBinary(t *testing.T) {
 	require.Equal(t, tmpBinPath, b.Config.Bundle.Terraform.ExecPath)
 }
 
-func createTempFile(t *testing.T, dest string, name string, executable bool) string {
+func createTempFile(t *testing.T, dest, name string, executable bool) string {
 	binPath := filepath.Join(dest, name)
 	f, err := os.Create(binPath)
 	require.NoError(t, err)
@@ -409,7 +409,7 @@ func createTempFile(t *testing.T, dest string, name string, executable bool) str
 		require.NoError(t, err)
 	}()
 	if executable {
-		err = f.Chmod(0777)
+		err = f.Chmod(0o777)
 		require.NoError(t, err)
 	}
 	return binPath
@@ -422,7 +422,7 @@ func TestGetEnvVarWithMatchingVersion(t *testing.T) {
 	tmp := t.TempDir()
 	file := testutil.Touch(t, tmp, "bar")
 
-	var tc = []struct {
+	tc := []struct {
 		envValue string
 		versionValue string
 		currentVersion string

View File

@@ -10,8 +10,7 @@ import (
 	"github.com/databricks/cli/libs/dyn/dynvar"
 )
 
-type interpolateMutator struct {
-}
+type interpolateMutator struct{}
 
 func Interpolate() bundle.Mutator {
 	return &interpolateMutator{}

View File

@@ -5,15 +5,19 @@ import (
 	"github.com/hashicorp/go-version"
 )
 
-const TerraformStateFileName = "terraform.tfstate"
-const TerraformConfigFileName = "bundle.tf.json"
+const (
+	TerraformStateFileName  = "terraform.tfstate"
+	TerraformConfigFileName = "bundle.tf.json"
+)
 
 // Users can provide their own terraform binary and databricks terraform provider by setting the following environment variables.
 // This allows users to use the CLI in an air-gapped environments. See the `debug terraform` command.
-const TerraformExecPathEnv = "DATABRICKS_TF_EXEC_PATH"
-const TerraformVersionEnv = "DATABRICKS_TF_VERSION"
-const TerraformCliConfigPathEnv = "DATABRICKS_TF_CLI_CONFIG_FILE"
-const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
+const (
+	TerraformExecPathEnv        = "DATABRICKS_TF_EXEC_PATH"
+	TerraformVersionEnv         = "DATABRICKS_TF_VERSION"
+	TerraformCliConfigPathEnv   = "DATABRICKS_TF_CLI_CONFIG_FILE"
+	TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
+)
 
 // Terraform CLI version to use and the corresponding checksums for it. The
 // checksums are used to verify the integrity of the downloaded binary. Please
@@ -26,8 +30,10 @@ const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION"
 // downloaded Terraform archive.
 var TerraformVersion = version.Must(version.NewVersion("1.5.5"))
 
-const checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2"
-const checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a"
+const (
+	checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2"
+	checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a"
+)
 
 type Checksum struct {
 	LinuxArm64 string `json:"linux_arm64"`

View File

@@ -14,7 +14,7 @@ import (
 	"github.com/stretchr/testify/require"
 )
 
-func downloadAndChecksum(t *testing.T, url string, expectedChecksum string) {
+func downloadAndChecksum(t *testing.T, url, expectedChecksum string) {
 	resp, err := http.Get(url)
 	require.NoError(t, err)
 	defer resp.Body.Close()

View File

@@ -104,7 +104,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
 	localState, err := l.localState(ctx, b)
 	if errors.Is(err, fs.ErrNotExist) {
 		log.Infof(ctx, "Local state file does not exist. Using remote Terraform state.")
-		err := os.WriteFile(localStatePath, remoteContent, 0600)
+		err := os.WriteFile(localStatePath, remoteContent, 0o600)
 		return diag.FromErr(err)
 	}
 	if err != nil {
@@ -114,14 +114,14 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic
 
 	// If the lineage does not match, the Terraform state files do not correspond to the same deployment.
 	if localState.Lineage != remoteState.Lineage {
 		log.Infof(ctx, "Remote and local state lineages do not match. Using remote Terraform state. Invalidating local Terraform state.")
-		err := os.WriteFile(localStatePath, remoteContent, 0600)
+		err := os.WriteFile(localStatePath, remoteContent, 0o600)
 		return diag.FromErr(err)
 	}
 
 	// If the remote state is newer than the local state, we should use the remote state.
 	if remoteState.Serial > localState.Serial {
 		log.Infof(ctx, "Remote state is newer than local state. Using remote Terraform state.")
-		err := os.WriteFile(localStatePath, remoteContent, 0600)
+		err := os.WriteFile(localStatePath, remoteContent, 0o600)
 		return diag.FromErr(err)
 	}

View File

@@ -14,7 +14,7 @@ import (
 )
 
 func TestConvertCluster(t *testing.T) {
-	var src = resources.Cluster{
+	src := resources.Cluster{
 		ClusterSpec: &compute.ClusterSpec{
 			NumWorkers: 3,
 			SparkVersion: "13.3.x-scala2.12",
@@ -93,5 +93,4 @@ func TestConvertCluster(t *testing.T) {
 			},
 		},
 	}, out.Permissions["cluster_my_cluster"])
-
 }

View File

@@ -17,7 +17,7 @@ const (
 )
 
 // Marshal "serialized_dashboard" as JSON if it is set in the input but not in the output.
-func marshalSerializedDashboard(vin dyn.Value, vout dyn.Value) (dyn.Value, error) {
+func marshalSerializedDashboard(vin, vout dyn.Value) (dyn.Value, error) {
 	// Skip if the "serialized_dashboard" field is already set.
 	if v := vout.Get(serializedDashboardFieldName); v.IsValid() {
 		return vout, nil

View File

@@ -14,7 +14,7 @@ import (
 )
 
 func TestConvertDashboard(t *testing.T) {
-	var src = resources.Dashboard{
+	src := resources.Dashboard{
 		Dashboard: &dashboards.Dashboard{
 			DisplayName: "my dashboard",
 			WarehouseId: "f00dcafe",
@@ -60,7 +60,7 @@ func TestConvertDashboard(t *testing.T) {
 }
 
 func TestConvertDashboardFilePath(t *testing.T) {
-	var src = resources.Dashboard{
+	src := resources.Dashboard{
 		FilePath: "some/path",
 	}
 
@@ -84,7 +84,7 @@ func TestConvertDashboardFilePath(t *testing.T) {
 }
 
 func TestConvertDashboardFilePathQuoted(t *testing.T) {
-	var src = resources.Dashboard{
+	src := resources.Dashboard{
 		FilePath: `C:\foo\bar\baz\dashboard.lvdash.json`,
 	}
 
@@ -108,7 +108,7 @@ func TestConvertDashboardFilePathQuoted(t *testing.T) {
 }
 
 func TestConvertDashboardSerializedDashboardString(t *testing.T) {
-	var src = resources.Dashboard{
+	src := resources.Dashboard{
 		SerializedDashboard: `{ "json": true }`,
 	}
 
@@ -127,7 +127,7 @@ func TestConvertDashboardSerializedDashboardString(t *testing.T) {
 }
 
 func TestConvertDashboardSerializedDashboardAny(t *testing.T) {
-	var src = resources.Dashboard{
+	src := resources.Dashboard{
 		SerializedDashboard: map[string]any{
 			"pages": []map[string]any{
 				{

View File

@@ -14,7 +14,7 @@ import (
 )
 
 func TestConvertExperiment(t *testing.T) {
-	var src = resources.MlflowExperiment{
+	src := resources.MlflowExperiment{
 		Experiment: &ml.Experiment{
 			Name: "name",
 		},

View File

@@ -13,7 +13,7 @@ import (
 )
 
 func TestConvertGrants(t *testing.T) {
-	var src = resources.RegisteredModel{
+	src := resources.RegisteredModel{
 		Grants: []resources.Grant{
 			{
 				Privileges: []string{"EXECUTE", "FOO"},
@@ -45,7 +45,7 @@ func TestConvertGrants(t *testing.T) {
 }
 
 func TestConvertGrantsNil(t *testing.T) {
-	var src = resources.RegisteredModel{
+	src := resources.RegisteredModel{
 		Grants: nil,
 	}
 
@@ -58,7 +58,7 @@ func TestConvertGrantsNil(t *testing.T) {
 }
 
 func TestConvertGrantsEmpty(t *testing.T) {
-	var src = resources.RegisteredModel{
+	src := resources.RegisteredModel{
 		Grants: []resources.Grant{},
 	}

View File

@@ -83,7 +83,6 @@ func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) {
 			"libraries": "library",
 		})
 	})
-
 	if err != nil {
 		return dyn.InvalidValue, err
 	}

View File

@@ -15,7 +15,7 @@ import (
 )
 
 func TestConvertJob(t *testing.T) {
-	var src = resources.Job{
+	src := resources.Job{
 		JobSettings: &jobs.JobSettings{
 			Name: "my job",
 			JobClusters: []jobs.JobCluster{

View File

@@ -14,7 +14,7 @@ import (
 )
 
 func TestConvertModelServingEndpoint(t *testing.T) {
-	var src = resources.ModelServingEndpoint{
+	src := resources.ModelServingEndpoint{
 		CreateServingEndpoint: &serving.CreateServingEndpoint{
 			Name: "name",
 			Config: serving.EndpointCoreConfigInput{
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertModel(t *testing.T) { func TestConvertModel(t *testing.T) {
var src = resources.MlflowModel{ src := resources.MlflowModel{
Model: &ml.Model{ Model: &ml.Model{
Name: "name", Name: "name",
Description: "description", Description: "description",
View File
@ -13,7 +13,7 @@ import (
) )
func TestConvertPermissions(t *testing.T) { func TestConvertPermissions(t *testing.T) {
var src = resources.Job{ src := resources.Job{
Permissions: []resources.Permission{ Permissions: []resources.Permission{
{ {
Level: "CAN_VIEW", Level: "CAN_VIEW",
@ -59,7 +59,7 @@ func TestConvertPermissions(t *testing.T) {
} }
func TestConvertPermissionsNil(t *testing.T) { func TestConvertPermissionsNil(t *testing.T) {
var src = resources.Job{ src := resources.Job{
Permissions: nil, Permissions: nil,
} }
@ -72,7 +72,7 @@ func TestConvertPermissionsNil(t *testing.T) {
} }
func TestConvertPermissionsEmpty(t *testing.T) { func TestConvertPermissionsEmpty(t *testing.T) {
var src = resources.Job{ src := resources.Job{
Permissions: []resources.Permission{}, Permissions: []resources.Permission{},
} }
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertPipeline(t *testing.T) { func TestConvertPipeline(t *testing.T) {
var src = resources.Pipeline{ src := resources.Pipeline{
PipelineSpec: &pipelines.PipelineSpec{ PipelineSpec: &pipelines.PipelineSpec{
Name: "my pipeline", Name: "my pipeline",
Libraries: []pipelines.PipelineLibrary{ Libraries: []pipelines.PipelineLibrary{
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertQualityMonitor(t *testing.T) { func TestConvertQualityMonitor(t *testing.T) {
var src = resources.QualityMonitor{ src := resources.QualityMonitor{
TableName: "test_table_name", TableName: "test_table_name",
CreateMonitor: &catalog.CreateMonitor{ CreateMonitor: &catalog.CreateMonitor{
AssetsDir: "assets_dir", AssetsDir: "assets_dir",
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertRegisteredModel(t *testing.T) { func TestConvertRegisteredModel(t *testing.T) {
var src = resources.RegisteredModel{ src := resources.RegisteredModel{
CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{ CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{
Name: "name", Name: "name",
CatalogName: "catalog", CatalogName: "catalog",
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertSchema(t *testing.T) { func TestConvertSchema(t *testing.T) {
var src = resources.Schema{ src := resources.Schema{
CreateSchema: &catalog.CreateSchema{ CreateSchema: &catalog.CreateSchema{
Name: "name", Name: "name",
CatalogName: "catalog", CatalogName: "catalog",
View File
@ -14,7 +14,7 @@ import (
) )
func TestConvertVolume(t *testing.T) { func TestConvertVolume(t *testing.T) {
var src = resources.Volume{ src := resources.Volume{
CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{ CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{
CatalogName: "catalog", CatalogName: "catalog",
Comment: "comment", Comment: "comment",
View File
@ -11,7 +11,7 @@ import (
// definition uses the plural name. This function can convert between the two. // definition uses the plural name. This function can convert between the two.
func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) { func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
var err error var err error
var acc = dyn.V(map[string]dyn.Value{}) acc := dyn.V(map[string]dyn.Value{})
nv, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { nv, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
if len(p) == 0 { if len(p) == 0 {
@ -36,7 +36,6 @@ func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) {
// Pass through all other values. // Pass through all other values.
return v, dyn.ErrSkip return v, dyn.ErrSkip
}) })
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err
} }
View File
@ -37,6 +37,6 @@ func (*unbind) Name() string {
return "terraform.Unbind" return "terraform.Unbind"
} }
func Unbind(resourceType string, resourceKey string) bundle.Mutator { func Unbind(resourceType, resourceKey string) bundle.Mutator {
return &unbind{resourceType: resourceType, resourceKey: resourceKey} return &unbind{resourceType: resourceType, resourceKey: resourceKey}
} }
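The Unbind signature above collapses adjacent parameters that share a type. The same idea, sketched with hypothetical names:

package main

import "fmt"

// Before: func describe(resourceType string, resourceKey string) string
// After grouping adjacent parameters of the same type:
func describe(resourceType, resourceKey string) string {
	return fmt.Sprintf("%s.%s", resourceType, resourceKey)
}

func main() {
	fmt.Println(describe("databricks_job", "my_job"))
}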
View File
@ -48,7 +48,8 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema.
{ {
Type: jsonschema.StringType, Type: jsonschema.StringType,
Pattern: interpolationPattern("var"), Pattern: interpolationPattern("var"),
}}, },
},
} }
case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType: case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType:
// primitives can have variable values, or references like ${bundle.xyz} // primitives can have variable values, or references like ${bundle.xyz}
@ -149,7 +150,7 @@ func main() {
} }
// Write the schema descriptions to the output file. // Write the schema descriptions to the output file.
err = os.WriteFile(outputFile, b, 0644) err = os.WriteFile(outputFile, b, 0o644)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }
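The permission-bit changes here (0644 to 0o644, and 0700/0755 elsewhere in the diff) switch to the explicit 0o octal prefix; the numeric value is unchanged. A minimal sketch writing a hypothetical file:

package main

import (
	"log"
	"os"
)

func main() {
	// 0o644 denotes the same mode as the legacy 0644 spelling;
	// the 0o prefix only makes the octal base explicit.
	if err := os.WriteFile("example.txt", []byte("hello\n"), 0o644); err != nil {
		log.Fatal(err)
	}
}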
View File
@ -11,8 +11,7 @@ import (
"github.com/databricks/cli/libs/dyn" "github.com/databricks/cli/libs/dyn"
) )
type expand struct { type expand struct{}
}
func matchError(p dyn.Path, l []dyn.Location, message string) diag.Diagnostic { func matchError(p dyn.Path, l []dyn.Location, message string) diag.Diagnostic {
return diag.Diagnostic{ return diag.Diagnostic{
@ -189,7 +188,6 @@ func (e *expand) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
diags = diags.Extend(d) diags = diags.Extend(d)
return dyn.V(output), nil return dyn.V(output), nil
}) })
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err
} }
@ -197,7 +195,6 @@ func (e *expand) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return v, nil return v, nil
}) })
if err != nil { if err != nil {
diags = diags.Extend(diag.FromErr(err)) diags = diags.Extend(diag.FromErr(err))
} }
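The expand type above, like the other marker mutators in this commit, has its empty field list collapsed onto one line. A sketch with a hypothetical mutator-style type:

package main

import "fmt"

// Before:
//   type noop struct {
//   }
// After the empty field list is collapsed:
type noop struct{}

func (n *noop) Name() string { return "noop" }

func main() {
	fmt.Println((&noop{}).Name())
}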
View File
@ -110,7 +110,8 @@ func TestFilerForVolumeForErrorFromAPI(t *testing.T) {
Summary: "unable to determine if volume at /Volumes/main/my_schema/my_volume exists: error from API", Summary: "unable to determine if volume at /Volumes/main/my_schema/my_volume exists: error from API",
Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}, Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
}}, diags) },
}, diags)
} }
func TestFilerForVolumeWithVolumeNotFound(t *testing.T) { func TestFilerForVolumeWithVolumeNotFound(t *testing.T) {
@ -136,7 +137,8 @@ func TestFilerForVolumeWithVolumeNotFound(t *testing.T) {
Summary: "volume /Volumes/main/my_schema/doesnotexist does not exist: some error message", Summary: "volume /Volumes/main/my_schema/doesnotexist does not exist: some error message",
Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}, Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}},
Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")},
}}, diags) },
}, diags)
} }
func TestFilerForVolumeNotFoundAndInBundle(t *testing.T) { func TestFilerForVolumeNotFoundAndInBundle(t *testing.T) {
View File
@ -81,7 +81,6 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error
return v, nil return v, nil
}) })
}) })
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -119,7 +118,6 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error
return v, nil return v, nil
}) })
}) })
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -175,7 +173,6 @@ func (u *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
return v, nil return v, nil
}) })
if err != nil { if err != nil {
diags = diags.Extend(diag.FromErr(err)) diags = diags.Extend(diag.FromErr(err))
} }
View File
@ -56,7 +56,6 @@ func filter(currentUser string) dyn.WalkValueFunc {
} }
return v, nil return v, nil
} }
} }
View File
@ -90,7 +90,6 @@ func testFixture(userName string) *bundle.Bundle {
}, },
}, },
} }
} }
func TestFilterCurrentUser(t *testing.T) { func TestFilterCurrentUser(t *testing.T) {
View File
@ -13,14 +13,17 @@ import (
"github.com/databricks/cli/libs/dyn/convert" "github.com/databricks/cli/libs/dyn/convert"
) )
const CAN_MANAGE = "CAN_MANAGE" const (
const CAN_VIEW = "CAN_VIEW" CAN_MANAGE = "CAN_MANAGE"
const CAN_RUN = "CAN_RUN" CAN_VIEW = "CAN_VIEW"
CAN_RUN = "CAN_RUN"
)
var unsupportedResources = []string{"clusters", "volumes", "schemas", "quality_monitors", "registered_models"} var unsupportedResources = []string{"clusters", "volumes", "schemas", "quality_monitors", "registered_models"}
var allowedLevels = []string{CAN_MANAGE, CAN_VIEW, CAN_RUN} var (
var levelsMap = map[string](map[string]string){ allowedLevels = []string{CAN_MANAGE, CAN_VIEW, CAN_RUN}
levelsMap = map[string](map[string]string){
"jobs": { "jobs": {
CAN_MANAGE: "CAN_MANAGE", CAN_MANAGE: "CAN_MANAGE",
CAN_VIEW: "CAN_VIEW", CAN_VIEW: "CAN_VIEW",
@ -49,6 +52,7 @@ var levelsMap = map[string](map[string]string){
CAN_VIEW: "CAN_READ", CAN_VIEW: "CAN_READ",
}, },
} }
)
type bundlePermissions struct{} type bundlePermissions struct{}
@ -76,7 +80,6 @@ func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Di
v, err = dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { v, err = dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) {
var permissions []resources.Permission var permissions []resources.Permission
pv, err := dyn.Get(v, "permissions") pv, err := dyn.Get(v, "permissions")
// If the permissions field is not found, we set to an empty array // If the permissions field is not found, we set to an empty array
if err != nil { if err != nil {
pv = dyn.V([]dyn.Value{}) pv = dyn.V([]dyn.Value{})
@ -102,7 +105,6 @@ func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Di
return dyn.Set(v, "permissions", pv) return dyn.Set(v, "permissions", pv)
}) })
if err != nil { if err != nil {
return dyn.InvalidValue, err return dyn.InvalidValue, err
} }
@ -110,7 +112,6 @@ func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Di
return v, nil return v, nil
}) })
if err != nil { if err != nil {
return diag.FromErr(err) return diag.FromErr(err)
} }
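This file also shows adjacent top-level const and var declarations being merged into parenthesized blocks (the same later happens to type declarations). A sketch with hypothetical permission levels:

package main

import "fmt"

// Before: three separate const lines and two separate var lines.
// After: adjacent declarations of the same kind share one block.
const (
	CanManage = "CAN_MANAGE"
	CanView   = "CAN_VIEW"
	CanRun    = "CAN_RUN"
)

var (
	allowedLevels = []string{CanManage, CanView, CanRun}
	levelAliases  = map[string]string{CanView: "CAN_READ"}
)

func main() {
	fmt.Println(allowedLevels, levelAliases)
}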
View File
@ -9,8 +9,7 @@ import (
"github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/diag"
) )
type validateSharedRootPermissions struct { type validateSharedRootPermissions struct{}
}
func ValidateSharedRootPermissions() bundle.Mutator { func ValidateSharedRootPermissions() bundle.Mutator {
return &validateSharedRootPermissions{} return &validateSharedRootPermissions{}
View File
@ -52,7 +52,7 @@ func (p WorkspacePathPermissions) Compare(perms []resources.Permission) diag.Dia
} }
// containsAll checks if permA contains all permissions in permB. // containsAll checks if permA contains all permissions in permB.
func containsAll(permA []resources.Permission, permB []resources.Permission) (bool, []resources.Permission) { func containsAll(permA, permB []resources.Permission) (bool, []resources.Permission) {
missing := make([]resources.Permission, 0) missing := make([]resources.Permission, 0)
for _, a := range permA { for _, a := range permA {
found := false found := false
View File
@ -117,5 +117,4 @@ func TestWorkspacePathPermissionsCompare(t *testing.T) {
diags := wp.Compare(tc.perms) diags := wp.Compare(tc.perms)
require.Equal(t, tc.expected, diags) require.Equal(t, tc.expected, diags)
} }
} }
View File
@ -12,8 +12,7 @@ import (
"golang.org/x/sync/errgroup" "golang.org/x/sync/errgroup"
) )
type workspaceRootPermissions struct { type workspaceRootPermissions struct{}
}
func ApplyWorkspaceRootPermissions() bundle.Mutator { func ApplyWorkspaceRootPermissions() bundle.Mutator {
return &workspaceRootPermissions{} return &workspaceRootPermissions{}
View File
@ -25,7 +25,7 @@ func Bind(opts *terraform.BindOptions) bundle.Mutator {
) )
} }
func Unbind(resourceType string, resourceKey string) bundle.Mutator { func Unbind(resourceType, resourceKey string) bundle.Mutator {
return newPhase( return newPhase(
"unbind", "unbind",
[]bundle.Mutator{ []bundle.Mutator{
View File
@ -110,7 +110,7 @@ func renderSummaryHeaderTemplate(out io.Writer, b *bundle.Bundle) error {
return renderSummaryHeaderTemplate(out, &bundle.Bundle{}) return renderSummaryHeaderTemplate(out, &bundle.Bundle{})
} }
var currentUser = &iam.User{} currentUser := &iam.User{}
if b.Config.Workspace.CurrentUser != nil { if b.Config.Workspace.CurrentUser != nil {
if b.Config.Workspace.CurrentUser.User != nil { if b.Config.Workspace.CurrentUser.User != nil {
View File
@ -376,7 +376,8 @@ func TestRenderDiagnostics(t *testing.T) {
Locations: []dyn.Location{{ Locations: []dyn.Location{{
File: "foo.yaml", File: "foo.yaml",
Line: 1, Line: 1,
Column: 2}}, Column: 2,
}},
}, },
}, },
expected: "Error: failed to load xxx\n" + expected: "Error: failed to load xxx\n" +
View File
@ -71,7 +71,7 @@ func TestRootLookup(t *testing.T) {
defer f.Close() defer f.Close()
// Create directory tree. // Create directory tree.
err = os.MkdirAll("./a/b/c", 0755) err = os.MkdirAll("./a/b/c", 0o755)
require.NoError(t, err) require.NoError(t, err)
// It should find the project root from $PWD. // It should find the project root from $PWD.
View File
@ -289,7 +289,6 @@ func (r *jobRunner) Cancel(ctx context.Context) error {
ActiveOnly: true, ActiveOnly: true,
JobId: jobID, JobId: jobID,
}) })
if err != nil { if err != nil {
return err return err
} }
View File
@ -131,7 +131,7 @@ func (r *jobRunner) posArgsHandler() argsHandler {
} }
// Handle task parameters otherwise. // Handle task parameters otherwise.
var seen = make(map[jobTaskType]bool) seen := make(map[jobTaskType]bool)
for _, t := range job.Tasks { for _, t := range job.Tasks {
if t.NotebookTask != nil { if t.NotebookTask != nil {
seen[jobTaskTypeNotebook] = true seen[jobTaskTypeNotebook] = true
View File
@ -80,7 +80,7 @@ func (o *JobOptions) validatePipelineParams() (*jobs.PipelineParams, error) {
return nil, nil return nil, nil
} }
var defaultErr = fmt.Errorf("job run argument --pipeline-params only supports `full_refresh=<bool>`") defaultErr := fmt.Errorf("job run argument --pipeline-params only supports `full_refresh=<bool>`")
v, ok := o.pipelineParams["full_refresh"] v, ok := o.pipelineParams["full_refresh"]
if !ok { if !ok {
return nil, defaultErr return nil, defaultErr
View File
@ -7,13 +7,15 @@ import (
"github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/jobs"
) )
type NotebookOutput jobs.NotebookOutput type (
type DbtOutput jobs.DbtOutput NotebookOutput jobs.NotebookOutput
type SqlOutput jobs.SqlOutput DbtOutput jobs.DbtOutput
type LogsOutput struct { SqlOutput jobs.SqlOutput
LogsOutput struct {
Logs string `json:"logs"` Logs string `json:"logs"`
LogsTruncated bool `json:"logs_truncated"` LogsTruncated bool `json:"logs_truncated"`
} }
)
func structToString(val any) (string, error) { func structToString(val any) (string, error) {
b, err := json.MarshalIndent(val, "", " ") b, err := json.MarshalIndent(val, "", " ")
View File
@ -41,7 +41,7 @@ func (r *pipelineRunner) logEvent(ctx context.Context, event pipelines.PipelineE
} }
} }
func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId string, updateId string) error { func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId, updateId string) error {
w := r.bundle.WorkspaceClient() w := r.bundle.WorkspaceClient()
// Note: For a 100 percent correct and complete solution we should use the // Note: For a 100 percent correct and complete solution we should use the
@ -85,7 +85,7 @@ func (r *pipelineRunner) Name() string {
} }
func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) { func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) {
var pipelineID = r.pipeline.ID pipelineID := r.pipeline.ID
// Include resource key in logger. // Include resource key in logger.
ctx = log.NewContext(ctx, log.GetLogger(ctx).With("resource", r.Key())) ctx = log.NewContext(ctx, log.GetLogger(ctx).With("resource", r.Key()))
@ -173,7 +173,6 @@ func (r *pipelineRunner) Cancel(ctx context.Context) error {
wait, err := w.Pipelines.Stop(ctx, pipelines.StopRequest{ wait, err := w.Pipelines.Stop(ctx, pipelines.StopRequest{
PipelineId: r.pipeline.ID, PipelineId: r.pipeline.ID,
}) })
if err != nil { if err != nil {
return err return err
} }
View File
@ -51,7 +51,7 @@ type UpdateTracker struct {
w *databricks.WorkspaceClient w *databricks.WorkspaceClient
} }
func NewUpdateTracker(pipelineId string, updateId string, w *databricks.WorkspaceClient) *UpdateTracker { func NewUpdateTracker(pipelineId, updateId string, w *databricks.WorkspaceClient) *UpdateTracker {
return &UpdateTracker{ return &UpdateTracker{
w: w, w: w,
PipelineId: pipelineId, PipelineId: pipelineId,
View File
@ -93,7 +93,6 @@ func TestRunAsForAllowedWithTargetOverride(t *testing.T) {
assert.Equal(t, ml.Model{Name: "skynet"}, *b.Config.Resources.Models["model_one"].Model) assert.Equal(t, ml.Model{Name: "skynet"}, *b.Config.Resources.Models["model_one"].Model)
assert.Equal(t, catalog.CreateRegisteredModelRequest{Name: "skynet (in UC)"}, *b.Config.Resources.RegisteredModels["model_two"].CreateRegisteredModelRequest) assert.Equal(t, catalog.CreateRegisteredModelRequest{Name: "skynet (in UC)"}, *b.Config.Resources.RegisteredModels["model_two"].CreateRegisteredModelRequest)
assert.Equal(t, ml.Experiment{Name: "experiment_one"}, *b.Config.Resources.Experiments["experiment_one"].Experiment) assert.Equal(t, ml.Experiment{Name: "experiment_one"}, *b.Config.Resources.Experiments["experiment_one"].Experiment)
} }
func TestRunAsErrorForPipelines(t *testing.T) { func TestRunAsErrorForPipelines(t *testing.T) {
@ -220,7 +219,6 @@ func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) {
for _, tc := range tcases { for _, tc := range tcases {
t.Run(tc.name, func(t *testing.T) { t.Run(tc.name, func(t *testing.T) {
bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name) bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name)
b := load(t, bundlePath) b := load(t, bundlePath)
View File
@ -151,7 +151,7 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) {
} }
func TestVariableTargetOverrides(t *testing.T) { func TestVariableTargetOverrides(t *testing.T) {
var tcases = []struct { tcases := []struct {
targetName string targetName string
pipelineName string pipelineName string
pipelineContinuous bool pipelineContinuous bool
View File
@ -14,8 +14,7 @@ import (
"golang.org/x/mod/semver" "golang.org/x/mod/semver"
) )
type wrapperWarning struct { type wrapperWarning struct{}
}
func WrapperWarning() bundle.Mutator { func WrapperWarning() bundle.Mutator {
return &wrapperWarning{} return &wrapperWarning{}
@ -62,7 +61,6 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool {
if task.ExistingClusterId != "" { if task.ExistingClusterId != "" {
version, err := getSparkVersionForCluster(ctx, b.WorkspaceClient(), task.ExistingClusterId) version, err := getSparkVersionForCluster(ctx, b.WorkspaceClient(), task.ExistingClusterId)
// If there's error getting spark version for cluster, do not mark it as incompatible // If there's error getting spark version for cluster, do not mark it as incompatible
if err != nil { if err != nil {
log.Warnf(ctx, "unable to get spark version for cluster %s, err: %s", task.ExistingClusterId, err.Error()) log.Warnf(ctx, "unable to get spark version for cluster %s, err: %s", task.ExistingClusterId, err.Error())
View File
@ -127,7 +127,8 @@ func TestNoPanicWithNoPythonWheelTasks(t *testing.T) {
Tasks: []jobs.Task{ Tasks: []jobs.Task{
{ {
TaskKey: "notebook_task", TaskKey: "notebook_task",
NotebookTask: &jobs.NotebookTask{}}, NotebookTask: &jobs.NotebookTask{},
},
}, },
}, },
}, },
View File
@ -62,7 +62,7 @@ func (m *trampoline) generateNotebookWrapper(ctx context.Context, b *bundle.Bund
notebookName := fmt.Sprintf("notebook_%s_%s", task.JobKey, task.Task.TaskKey) notebookName := fmt.Sprintf("notebook_%s_%s", task.JobKey, task.Task.TaskKey)
localNotebookPath := filepath.Join(internalDir, notebookName+".py") localNotebookPath := filepath.Join(internalDir, notebookName+".py")
err = os.MkdirAll(filepath.Dir(localNotebookPath), 0755) err = os.MkdirAll(filepath.Dir(localNotebookPath), 0o755)
if err != nil { if err != nil {
return err return err
} }
View File
@ -52,7 +52,8 @@ func TestGenerateTrampoline(t *testing.T) {
PythonWheelTask: &jobs.PythonWheelTask{ PythonWheelTask: &jobs.PythonWheelTask{
PackageName: "test", PackageName: "test",
EntryPoint: "run", EntryPoint: "run",
}}, },
},
} }
b := &bundle.Bundle{ b := &bundle.Bundle{
View File
@ -39,7 +39,7 @@ func makeCommand(method string) *cobra.Command {
Args: root.ExactArgs(1), Args: root.ExactArgs(1),
Short: fmt.Sprintf("Perform %s request", method), Short: fmt.Sprintf("Perform %s request", method),
RunE: func(cmd *cobra.Command, args []string) error { RunE: func(cmd *cobra.Command, args []string) error {
var path = args[0] path := args[0]
var request any var request any
diags := payload.Unmarshal(&request) diags := payload.Unmarshal(&request)
Some files were not shown because too many files have changed in this diff.