diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index a2ba58aa5..68cd2f4be 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -f2385add116e3716c8a90a0b68e204deb40f996c \ No newline at end of file +7016dcbf2e011459416cf408ce21143bcc4b3a25 \ No newline at end of file diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..6304b3604 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,8 @@ +# Enable gofumpt and goimports in golangci-lint (#1999) +2e018cfaec200a02ee2bd5b389e7da3c6f15f460 + +# Enable errcheck everywhere and fix or silent remaining issues (#1987) +8d5351c1c3d7befda4baae5d6adb99367aa50b3c + +# Add error checking in tests and enable errcheck there (#1980) +1b2be1b2cb4b7909df2a8ad4cb6a0f43e8fcf0c6 diff --git a/.gitattributes b/.gitattributes index 2755c02d7..3bc3c73c3 100755 --- a/.gitattributes +++ b/.gitattributes @@ -37,6 +37,9 @@ cmd/workspace/apps/apps.go linguist-generated=true cmd/workspace/artifact-allowlists/artifact-allowlists.go linguist-generated=true cmd/workspace/automatic-cluster-update/automatic-cluster-update.go linguist-generated=true cmd/workspace/catalogs/catalogs.go linguist-generated=true +cmd/workspace/clean-room-assets/clean-room-assets.go linguist-generated=true +cmd/workspace/clean-room-task-runs/clean-room-task-runs.go linguist-generated=true +cmd/workspace/clean-rooms/clean-rooms.go linguist-generated=true cmd/workspace/cluster-policies/cluster-policies.go linguist-generated=true cmd/workspace/clusters/clusters.go linguist-generated=true cmd/workspace/cmd.go linguist-generated=true diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index ebb3e75d4..a497a7d7b 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -44,7 +44,7 @@ jobs: run: | echo "GOPATH=$(go env GOPATH)" >> $GITHUB_ENV echo "$(go env GOPATH)/bin" >> $GITHUB_PATH - go install gotest.tools/gotestsum@latest + go install gotest.tools/gotestsum@v1.12.0 - name: Pull external libraries run: | diff --git a/.golangci.yaml b/.golangci.yaml index 82e4d9848..9e69e5146 100644 --- a/.golangci.yaml +++ b/.golangci.yaml @@ -2,21 +2,37 @@ linters: disable-all: true enable: - bodyclose - # errcheck and govet are part of default setup and should be included but give too many errors now - # once errors are fixed, they should be enabled here: - #- errcheck + - errcheck - gosimple - #- govet + - govet - ineffassign - staticcheck - unused - gofmt + - gofumpt + - goimports linters-settings: + govet: + enable-all: true + disable: + - fieldalignment + - shadow gofmt: rewrite-rules: - pattern: 'a[b:len(a)]' replacement: 'a[b:]' - pattern: 'interface{}' replacement: 'any' + errcheck: + exclude-functions: + - (*github.com/spf13/cobra.Command).RegisterFlagCompletionFunc + - (*github.com/spf13/cobra.Command).MarkFlagRequired + - (*github.com/spf13/pflag.FlagSet).MarkDeprecated + - (*github.com/spf13/pflag.FlagSet).MarkHidden + gofumpt: + module-path: github.com/databricks/cli + extra-rules: true + #goimports: + # local-prefixes: github.com/databricks/cli issues: exclude-dirs-use-default: false # recommended by docs https://golangci-lint.run/usage/false-positives/ diff --git a/.vscode/settings.json b/.vscode/settings.json index 853e84de8..f8b04f126 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -7,11 +7,14 @@ "go.lintFlags": [ "--fast" ], + "go.useLanguageServer": true, + "gopls": { + "formatting.gofumpt": true + }, "files.trimTrailingWhitespace": true, "files.insertFinalNewline": 
true, "files.trimFinalNewlines": true, "python.envFile": "${workspaceRoot}/.env", - "databricks.python.envFile": "${workspaceFolder}/.env", "python.analysis.stubPath": ".vscode", "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------" diff --git a/Makefile b/Makefile index ccedede1d..dda0fc737 100644 --- a/Makefile +++ b/Makefile @@ -7,13 +7,13 @@ fmt: @gofmt -w $(shell find . -type f -name '*.go' -not -path "./vendor/*") lint: vendor + @echo "✓ Linting source code with https://golangci-lint.run/ (with --fix)..." + @golangci-lint run --fix ./... + +lintcheck: vendor @echo "✓ Linting source code with https://golangci-lint.run/ ..." @golangci-lint run ./... -lintfix: vendor - @echo "✓ Linting source code with 'golangci-lint run --fix' ..." - @golangci-lint run --fix ./... - test: lint testonly testonly: @@ -37,6 +37,6 @@ vendor: @go mod vendor integration: - gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./internal/..." -- -run "TestAcc.*" -p 8 -parallel 8 -timeout=2h + gotestsum --format github-actions --rerun-fails --jsonfile output.json --packages "./integration/..." -- -p 8 -parallel 8 -timeout=2h -.PHONY: fmt lint lintfix test testonly coverage build snapshot vendor integration +.PHONY: fmt lint lintcheck test testonly coverage build snapshot vendor integration diff --git a/bundle/artifacts/all.go b/bundle/artifacts/all.go index 305193e2e..768ccdfe3 100644 --- a/bundle/artifacts/all.go +++ b/bundle/artifacts/all.go @@ -3,7 +3,6 @@ package artifacts import ( "context" "fmt" - "slices" "github.com/databricks/cli/bundle" diff --git a/bundle/artifacts/autodetect.go b/bundle/artifacts/autodetect.go index 569a480f0..c8d235616 100644 --- a/bundle/artifacts/autodetect.go +++ b/bundle/artifacts/autodetect.go @@ -13,8 +13,7 @@ func DetectPackages() bundle.Mutator { return &autodetect{} } -type autodetect struct { -} +type autodetect struct{} func (m *autodetect) Name() string { return "artifacts.DetectPackages" diff --git a/bundle/artifacts/expand_globs.go b/bundle/artifacts/expand_globs.go index cdf3d4590..7d44db0be 100644 --- a/bundle/artifacts/expand_globs.go +++ b/bundle/artifacts/expand_globs.go @@ -96,7 +96,6 @@ func (m *expandGlobs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost // Set the expanded globs back into the configuration. 
return dyn.SetByPath(v, base, dyn.V(output)) }) - if err != nil { return diag.FromErr(err) } diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go index 88dc742c1..202ea12bc 100644 --- a/bundle/artifacts/whl/autodetect.go +++ b/bundle/artifacts/whl/autodetect.go @@ -15,8 +15,7 @@ import ( "github.com/databricks/cli/libs/log" ) -type detectPkg struct { -} +type detectPkg struct{} func DetectPackage() bundle.Mutator { return &detectPkg{} @@ -42,7 +41,7 @@ func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic return nil } - log.Infof(ctx, fmt.Sprintf("Found Python wheel project at %s", b.BundleRootPath)) + log.Infof(ctx, "Found Python wheel project at %s", b.BundleRootPath) module := extractModuleName(setupPy) if b.Config.Artifacts == nil { diff --git a/bundle/bundle.go b/bundle/bundle.go index 76c87c24c..573bcef2f 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -186,7 +186,7 @@ func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error) // Make directory if it doesn't exist yet. dir := filepath.Join(parts...) - err := os.MkdirAll(dir, 0700) + err := os.MkdirAll(dir, 0o700) if err != nil { return "", err } @@ -203,7 +203,7 @@ func (b *Bundle) InternalDir(ctx context.Context) (string, error) { } dir := filepath.Join(cacheDir, internalFolder) - err = os.MkdirAll(dir, 0700) + err = os.MkdirAll(dir, 0o700) if err != nil { return dir, err } diff --git a/bundle/config/experimental.go b/bundle/config/experimental.go index 061bbdae0..4c787168f 100644 --- a/bundle/config/experimental.go +++ b/bundle/config/experimental.go @@ -47,8 +47,10 @@ type PyDABs struct { Import []string `json:"import,omitempty"` } -type Command string -type ScriptHook string +type ( + Command string + ScriptHook string +) // These hook names are subject to change and currently experimental const ( diff --git a/bundle/config/generate/job.go b/bundle/config/generate/job.go index 6cd7c1b32..0cdcbf3ad 100644 --- a/bundle/config/generate/job.go +++ b/bundle/config/generate/job.go @@ -6,8 +6,10 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" ) -var jobOrder = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"}) -var taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"}) +var ( + jobOrder = yamlsaver.NewOrder([]string{"name", "job_clusters", "compute", "tasks"}) + taskOrder = yamlsaver.NewOrder([]string{"task_key", "depends_on", "existing_cluster_id", "new_cluster", "job_cluster_key"}) +) func ConvertJobToValue(job *jobs.Job) (dyn.Value, error) { value := make(map[string]dyn.Value) diff --git a/bundle/config/loader/process_root_includes.go b/bundle/config/loader/process_root_includes.go index c14fb7ce1..c608a3de6 100644 --- a/bundle/config/loader/process_root_includes.go +++ b/bundle/config/loader/process_root_includes.go @@ -27,7 +27,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) diag. var out []bundle.Mutator // Map with files we've already seen to avoid loading them twice. 
- var seen = map[string]bool{} + seen := map[string]bool{} for _, file := range config.FileNames { seen[file] = true diff --git a/bundle/config/mutator/apply_presets_test.go b/bundle/config/mutator/apply_presets_test.go index 91d5b62e5..c26f20383 100644 --- a/bundle/config/mutator/apply_presets_test.go +++ b/bundle/config/mutator/apply_presets_test.go @@ -481,5 +481,4 @@ func TestApplyPresetsSourceLinkedDeployment(t *testing.T) { require.Equal(t, tt.expectedValue, b.Config.Presets.SourceLinkedDeployment) }) } - } diff --git a/bundle/config/mutator/compute_id_compat.go b/bundle/config/mutator/compute_id_compat.go index 3afe02e9e..8f1ff5868 100644 --- a/bundle/config/mutator/compute_id_compat.go +++ b/bundle/config/mutator/compute_id_compat.go @@ -42,7 +42,6 @@ func rewriteComputeIdToClusterId(v dyn.Value, p dyn.Path) (dyn.Value, diag.Diagn var diags diag.Diagnostics computeIdPath := p.Append(dyn.Key("compute_id")) computeId, err := dyn.GetByPath(v, computeIdPath) - // If the "compute_id" key is not set, we don't need to do anything. if err != nil { return v, nil diff --git a/bundle/config/mutator/expand_pipeline_glob_paths_test.go b/bundle/config/mutator/expand_pipeline_glob_paths_test.go index 9f70b74ae..7cf3c9f3e 100644 --- a/bundle/config/mutator/expand_pipeline_glob_paths_test.go +++ b/bundle/config/mutator/expand_pipeline_glob_paths_test.go @@ -17,7 +17,7 @@ import ( ) func touchEmptyFile(t *testing.T, path string) { - err := os.MkdirAll(filepath.Dir(path), 0700) + err := os.MkdirAll(filepath.Dir(path), 0o700) require.NoError(t, err) f, err := os.Create(path) require.NoError(t, err) diff --git a/bundle/config/mutator/expand_workspace_root.go b/bundle/config/mutator/expand_workspace_root.go index 3f0547de1..a29d129b0 100644 --- a/bundle/config/mutator/expand_workspace_root.go +++ b/bundle/config/mutator/expand_workspace_root.go @@ -28,7 +28,7 @@ func (m *expandWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle) diag. } currentUser := b.Config.Workspace.CurrentUser - if currentUser == nil || currentUser.UserName == "" { + if currentUser == nil || currentUser.User == nil || currentUser.UserName == "" { return diag.Errorf("unable to expand workspace root: current user not set") } diff --git a/bundle/config/mutator/initialize_urls.go b/bundle/config/mutator/initialize_urls.go index 319305912..35ff53d0b 100644 --- a/bundle/config/mutator/initialize_urls.go +++ b/bundle/config/mutator/initialize_urls.go @@ -10,8 +10,7 @@ import ( "github.com/databricks/cli/libs/diag" ) -type initializeURLs struct { -} +type initializeURLs struct{} // InitializeURLs makes sure the URL field of each resource is configured. 
// NOTE: since this depends on an extra API call, this mutator adds some extra @@ -32,11 +31,14 @@ func (m *initializeURLs) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn } orgId := strconv.FormatInt(workspaceId, 10) host := b.WorkspaceClient().Config.CanonicalHostName() - initializeForWorkspace(b, orgId, host) + err = initializeForWorkspace(b, orgId, host) + if err != nil { + return diag.FromErr(err) + } return nil } -func initializeForWorkspace(b *bundle.Bundle, orgId string, host string) error { +func initializeForWorkspace(b *bundle.Bundle, orgId, host string) error { baseURL, err := url.Parse(host) if err != nil { return err diff --git a/bundle/config/mutator/initialize_urls_test.go b/bundle/config/mutator/initialize_urls_test.go index ec4e790c4..f07a7deb3 100644 --- a/bundle/config/mutator/initialize_urls_test.go +++ b/bundle/config/mutator/initialize_urls_test.go @@ -110,7 +110,8 @@ func TestInitializeURLs(t *testing.T) { "dashboard1": "https://mycompany.databricks.com/dashboardsv3/01ef8d56871e1d50ae30ce7375e42478/published?o=123456", } - initializeForWorkspace(b, "123456", "https://mycompany.databricks.com/") + err := initializeForWorkspace(b, "123456", "https://mycompany.databricks.com/") + require.NoError(t, err) for _, group := range b.Config.Resources.AllResources() { for key, r := range group.Resources { @@ -133,7 +134,8 @@ func TestInitializeURLsWithoutOrgId(t *testing.T) { }, } - initializeForWorkspace(b, "123456", "https://adb-123456.azuredatabricks.net/") + err := initializeForWorkspace(b, "123456", "https://adb-123456.azuredatabricks.net/") + require.NoError(t, err) require.Equal(t, "https://adb-123456.azuredatabricks.net/jobs/1", b.Config.Resources.Jobs["job1"].URL) } diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index 82255552a..5c263ac03 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -2,6 +2,8 @@ package mutator import ( "context" + "errors" + "os" "path/filepath" "github.com/databricks/cli/bundle" @@ -24,7 +26,9 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagn var diags diag.Diagnostics info, err := git.FetchRepositoryInfo(ctx, b.BundleRoot.Native(), b.WorkspaceClient()) if err != nil { - diags = append(diags, diag.WarningFromErr(err)...) + if !errors.Is(err, os.ErrNotExist) { + diags = append(diags, diag.WarningFromErr(err)...) 
+ } } if info.WorktreeRoot == "" { diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index 5700cdf26..343303402 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/env" ) @@ -22,7 +23,7 @@ func (m *overrideCompute) Name() string { func overrideJobCompute(j *resources.Job, compute string) { for i := range j.Tasks { - var task = &j.Tasks[i] + task := &j.Tasks[i] if task.ForEachTask != nil { task = &task.ForEachTask.Task @@ -38,18 +39,32 @@ func overrideJobCompute(j *resources.Job, compute string) { } func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { - if b.Config.Bundle.Mode != config.Development { + var diags diag.Diagnostics + + if b.Config.Bundle.Mode == config.Production { if b.Config.Bundle.ClusterId != "" { - return diag.Errorf("cannot override compute for an target that does not use 'mode: development'") + // Overriding compute via a command-line flag for production works, but is not recommended. + diags = diags.Extend(diag.Diagnostics{{ + Summary: "Setting a cluster override for a target that uses 'mode: production' is not recommended", + Detail: "It is recommended to always use the same compute for production targets for consistency.", + Severity: diag.Warning, + }}) } - return nil } if v := env.Get(ctx, "DATABRICKS_CLUSTER_ID"); v != "" { + // For historical reasons, we allow setting the cluster ID via the DATABRICKS_CLUSTER_ID + // variable when development mode is used. Sometimes, this is done by accident, so we log an info message. + if b.Config.Bundle.Mode == config.Development { + cmdio.LogString(ctx, "Setting a cluster override because DATABRICKS_CLUSTER_ID is set. It is recommended to use --cluster-id instead, which works in any target mode.") + } else { + // We don't allow using DATABRICKS_CLUSTER_ID in any other mode; it's too error-prone.
+ return diag.Warningf("The DATABRICKS_CLUSTER_ID variable is set but is ignored since the current target does not use 'mode: development'") + } b.Config.Bundle.ClusterId = v } if b.Config.Bundle.ClusterId == "" { - return nil + return diags } r := b.Config.Resources @@ -57,5 +72,5 @@ func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) diag.Diag overrideJobCompute(r.Jobs[i], b.Config.Bundle.ClusterId) } - return nil + return diags } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index 369447d7e..1fdeb373c 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -8,13 +8,14 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/diag" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestOverrideDevelopment(t *testing.T) { +func TestOverrideComputeModeDevelopment(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "") b := &bundle.Bundle{ Config: config.Root{ @@ -62,10 +63,13 @@ func TestOverrideDevelopment(t *testing.T) { assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey) } -func TestOverrideDevelopmentEnv(t *testing.T) { +func TestOverrideComputeModeDefaultIgnoresVariable(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") b := &bundle.Bundle{ Config: config.Root{ + Bundle: config.Bundle{ + Mode: "", + }, Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job1": {JobSettings: &jobs.JobSettings{ @@ -86,11 +90,12 @@ func TestOverrideDevelopmentEnv(t *testing.T) { m := mutator.OverrideCompute() diags := bundle.Apply(context.Background(), b, m) - require.NoError(t, diags.Error()) + require.Len(t, diags, 1) + assert.Equal(t, "The DATABRICKS_CLUSTER_ID variable is set but is ignored since the current target does not use 'mode: development'", diags[0].Summary) assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } -func TestOverridePipelineTask(t *testing.T) { +func TestOverrideComputePipelineTask(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") b := &bundle.Bundle{ Config: config.Root{ @@ -115,7 +120,7 @@ func TestOverridePipelineTask(t *testing.T) { assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) } -func TestOverrideForEachTask(t *testing.T) { +func TestOverrideComputeForEachTask(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") b := &bundle.Bundle{ Config: config.Root{ @@ -140,10 +145,11 @@ func TestOverrideForEachTask(t *testing.T) { assert.Empty(t, b.Config.Resources.Jobs["job1"].Tasks[0].ForEachTask.Task) } -func TestOverrideProduction(t *testing.T) { +func TestOverrideComputeModeProduction(t *testing.T) { b := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ + Mode: config.Production, ClusterId: "newClusterID", }, Resources: config.Resources{ @@ -166,13 +172,19 @@ func TestOverrideProduction(t *testing.T) { m := mutator.OverrideCompute() diags := bundle.Apply(context.Background(), b, m) - require.True(t, diags.HasError()) + require.Len(t, diags, 1) + assert.Equal(t, "Setting a cluster override for a target that uses 'mode: production' is not recommended", diags[0].Summary) + assert.Equal(t, diag.Warning, diags[0].Severity) + 
assert.Equal(t, "newClusterID", b.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) } -func TestOverrideProductionEnv(t *testing.T) { +func TestOverrideComputeModeProductionIgnoresVariable(t *testing.T) { t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") b := &bundle.Bundle{ Config: config.Root{ + Bundle: config.Bundle{ + Mode: config.Production, + }, Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job1": {JobSettings: &jobs.JobSettings{ @@ -193,5 +205,7 @@ func TestOverrideProductionEnv(t *testing.T) { m := mutator.OverrideCompute() diags := bundle.Apply(context.Background(), b, m) - require.NoError(t, diags.Error()) + require.Len(t, diags, 1) + assert.Equal(t, "The DATABRICKS_CLUSTER_ID variable is set but is ignored since the current target does not use 'mode: development'", diags[0].Summary) + assert.Equal(t, "cluster2", b.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } diff --git a/bundle/config/mutator/paths/job_paths_visitor.go b/bundle/config/mutator/paths/job_paths_visitor.go index 275a8fa53..1d713aaf5 100644 --- a/bundle/config/mutator/paths/job_paths_visitor.go +++ b/bundle/config/mutator/paths/job_paths_visitor.go @@ -95,7 +95,7 @@ func jobRewritePatterns() []jobRewritePattern { // VisitJobPaths visits all paths in job resources and applies a function to each path. func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) { var err error - var newValue = value + newValue := value for _, rewritePattern := range jobRewritePatterns() { newValue, err = dyn.MapByPattern(newValue, rewritePattern.pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { @@ -105,7 +105,6 @@ func VisitJobPaths(value dyn.Value, fn VisitFunc) (dyn.Value, error) { return fn(p, rewritePattern.kind, v) }) - if err != nil { return dyn.InvalidValue, err } diff --git a/bundle/config/mutator/prepend_workspace_prefix.go b/bundle/config/mutator/prepend_workspace_prefix.go index e0be2572d..b093ec26a 100644 --- a/bundle/config/mutator/prepend_workspace_prefix.go +++ b/bundle/config/mutator/prepend_workspace_prefix.go @@ -57,14 +57,12 @@ func (m *prependWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di return dyn.NewValue(fmt.Sprintf("/Workspace%s", path), v.Locations()), nil }) - if err != nil { return dyn.InvalidValue, err } } return v, nil }) - if err != nil { return diag.FromErr(err) } diff --git a/bundle/config/mutator/python/python_diagnostics_test.go b/bundle/config/mutator/python/python_diagnostics_test.go index b73b0f73c..fd6def8da 100644 --- a/bundle/config/mutator/python/python_diagnostics_test.go +++ b/bundle/config/mutator/python/python_diagnostics_test.go @@ -30,7 +30,6 @@ type parsePythonDiagnosticsTest struct { } func TestParsePythonDiagnostics(t *testing.T) { - testCases := []parsePythonDiagnosticsTest{ { name: "short error with location", diff --git a/bundle/config/mutator/python/python_mutator.go b/bundle/config/mutator/python/python_mutator.go index da6c4d210..69c1a5dd6 100644 --- a/bundle/config/mutator/python/python_mutator.go +++ b/bundle/config/mutator/python/python_mutator.go @@ -9,12 +9,11 @@ import ( "io" "os" "path/filepath" + "strings" "github.com/databricks/databricks-sdk-go/logger" "github.com/fatih/color" - "strings" - "github.com/databricks/cli/libs/python" "github.com/databricks/cli/bundle/env" @@ -94,11 +93,10 @@ func (m *pythonMutator) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagno // mutateDiags is used because Mutate returns 'error' instead of 'diag.Diagnostics' var mutateDiags diag.Diagnostics - var 
mutateDiagsHasError = errors.New("unexpected error") + mutateDiagsHasError := errors.New("unexpected error") err := b.Config.Mutate(func(leftRoot dyn.Value) (dyn.Value, error) { pythonPath, err := detectExecutable(ctx, experimental.PyDABs.VEnvPath) - if err != nil { return dyn.InvalidValue, fmt.Errorf("failed to get Python interpreter path: %w", err) } @@ -141,7 +139,7 @@ func createCacheDir(ctx context.Context) (string, error) { // use 'default' as target name cacheDir := filepath.Join(tempDir, "default", "pydabs") - err := os.MkdirAll(cacheDir, 0700) + err := os.MkdirAll(cacheDir, 0o700) if err != nil { return "", err } @@ -152,7 +150,7 @@ func createCacheDir(ctx context.Context) (string, error) { return os.MkdirTemp("", "-pydabs") } -func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir string, rootPath string, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) { +func (m *pythonMutator) runPythonMutator(ctx context.Context, cacheDir, rootPath, pythonPath string, root dyn.Value) (dyn.Value, diag.Diagnostics) { inputPath := filepath.Join(cacheDir, "input.json") outputPath := filepath.Join(cacheDir, "output.json") diagnosticsPath := filepath.Join(cacheDir, "diagnostics.json") @@ -263,10 +261,10 @@ func writeInputFile(inputPath string, input dyn.Value) error { return fmt.Errorf("failed to marshal input: %w", err) } - return os.WriteFile(inputPath, rootConfigJson, 0600) + return os.WriteFile(inputPath, rootConfigJson, 0o600) } -func loadOutputFile(rootPath string, outputPath string) (dyn.Value, diag.Diagnostics) { +func loadOutputFile(rootPath, outputPath string) (dyn.Value, diag.Diagnostics) { outputFile, err := os.Open(outputPath) if err != nil { return dyn.InvalidValue, diag.FromErr(fmt.Errorf("failed to open output file: %w", err)) @@ -381,7 +379,7 @@ func createLoadOverrideVisitor(ctx context.Context) merge.OverrideVisitor { return right, nil }, - VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) { + VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String()) }, } @@ -430,7 +428,7 @@ func createInitOverrideVisitor(ctx context.Context) merge.OverrideVisitor { return right, nil }, - VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) { + VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) { if !valuePath.HasPrefix(jobsPath) { return dyn.InvalidValue, fmt.Errorf("unexpected change at %q (update)", valuePath.String()) } diff --git a/bundle/config/mutator/python/python_mutator_test.go b/bundle/config/mutator/python/python_mutator_test.go index 7a419d799..0c6df9833 100644 --- a/bundle/config/mutator/python/python_mutator_test.go +++ b/bundle/config/mutator/python/python_mutator_test.go @@ -106,7 +106,6 @@ func TestPythonMutator_load(t *testing.T) { Column: 5, }, }, diags[0].Locations) - } func TestPythonMutator_load_disallowed(t *testing.T) { @@ -588,7 +587,7 @@ or activate the environment before running CLI commands: assert.Equal(t, expected, out) } -func withProcessStub(t *testing.T, args []string, output string, diagnostics string) context.Context { +func withProcessStub(t *testing.T, args []string, output, diagnostics string) context.Context { ctx := context.Background() ctx, stub := process.WithStub(ctx) @@ -611,10 +610,10 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str assert.NoError(t, err) if 
reflect.DeepEqual(actual.Args, args) { - err := os.WriteFile(outputPath, []byte(output), 0600) + err := os.WriteFile(outputPath, []byte(output), 0o600) require.NoError(t, err) - err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0600) + err = os.WriteFile(diagnosticsPath, []byte(diagnostics), 0o600) require.NoError(t, err) return nil @@ -626,7 +625,7 @@ func withProcessStub(t *testing.T, args []string, output string, diagnostics str return ctx } -func loadYaml(name string, content string) *bundle.Bundle { +func loadYaml(name, content string) *bundle.Bundle { v, diag := config.LoadFromBytes(name, []byte(content)) if diag.Error() != nil { @@ -650,17 +649,17 @@ func withFakeVEnv(t *testing.T, venvPath string) { interpreterPath := interpreterPath(venvPath) - err = os.MkdirAll(filepath.Dir(interpreterPath), 0755) + err = os.MkdirAll(filepath.Dir(interpreterPath), 0o755) if err != nil { panic(err) } - err = os.WriteFile(interpreterPath, []byte(""), 0755) + err = os.WriteFile(interpreterPath, []byte(""), 0o755) if err != nil { panic(err) } - err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0755) + err = os.WriteFile(filepath.Join(venvPath, "pyvenv.cfg"), []byte(""), 0o755) if err != nil { panic(err) } diff --git a/bundle/config/mutator/resolve_resource_references.go b/bundle/config/mutator/resolve_resource_references.go index 89eaa346c..bf902f928 100644 --- a/bundle/config/mutator/resolve_resource_references.go +++ b/bundle/config/mutator/resolve_resource_references.go @@ -36,8 +36,7 @@ func (m *resolveResourceReferences) Apply(ctx context.Context, b *bundle.Bundle) return fmt.Errorf("failed to resolve %s, err: %w", v.Lookup, err) } - v.Set(id) - return nil + return v.Set(id) }) } diff --git a/bundle/config/mutator/resolve_resource_references_test.go b/bundle/config/mutator/resolve_resource_references_test.go index ee2f0e2ea..624e337c7 100644 --- a/bundle/config/mutator/resolve_resource_references_test.go +++ b/bundle/config/mutator/resolve_resource_references_test.go @@ -108,7 +108,8 @@ func TestNoLookupIfVariableIsSet(t *testing.T) { m := mocks.NewMockWorkspaceClient(t) b.SetWorkpaceClient(m.WorkspaceClient) - b.Config.Variables["my-cluster-id"].Set("random value") + err := b.Config.Variables["my-cluster-id"].Set("random value") + require.NoError(t, err) diags := bundle.Apply(context.Background(), b, ResolveResourceReferences()) require.NoError(t, diags.Error()) diff --git a/bundle/config/mutator/resolve_variable_references.go b/bundle/config/mutator/resolve_variable_references.go index 5e5b76109..8c207e375 100644 --- a/bundle/config/mutator/resolve_variable_references.go +++ b/bundle/config/mutator/resolve_variable_references.go @@ -32,11 +32,12 @@ func ResolveVariableReferencesInLookup() bundle.Mutator { } func ResolveVariableReferencesInComplexVariables() bundle.Mutator { - return &resolveVariableReferences{prefixes: []string{ - "bundle", - "workspace", - "variables", - }, + return &resolveVariableReferences{ + prefixes: []string{ + "bundle", + "workspace", + "variables", + }, pattern: dyn.NewPattern(dyn.Key("variables"), dyn.AnyKey(), dyn.Key("value")), lookupFn: lookupForComplexVariables, skipFn: skipResolvingInNonComplexVariables, @@ -173,7 +174,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) return dyn.InvalidValue, dynvar.ErrSkipResolution }) }) - if err != nil { return dyn.InvalidValue, err } @@ -184,7 +184,6 @@ func (m *resolveVariableReferences) Apply(ctx context.Context, b *bundle.Bundle) diags = 
diags.Extend(normaliseDiags) return root, nil }) - if err != nil { diags = diags.Extend(diag.FromErr(err)) } diff --git a/bundle/config/mutator/rewrite_workspace_prefix.go b/bundle/config/mutator/rewrite_workspace_prefix.go index 8a39ee8a1..0ccb3314b 100644 --- a/bundle/config/mutator/rewrite_workspace_prefix.go +++ b/bundle/config/mutator/rewrite_workspace_prefix.go @@ -63,7 +63,6 @@ func (m *rewriteWorkspacePrefix) Apply(ctx context.Context, b *bundle.Bundle) di return v, nil }) }) - if err != nil { return diag.FromErr(err) } diff --git a/bundle/config/mutator/rewrite_workspace_prefix_test.go b/bundle/config/mutator/rewrite_workspace_prefix_test.go index d75ec89db..48973a4cf 100644 --- a/bundle/config/mutator/rewrite_workspace_prefix_test.go +++ b/bundle/config/mutator/rewrite_workspace_prefix_test.go @@ -81,5 +81,4 @@ func TestNoWorkspacePrefixUsed(t *testing.T) { require.Equal(t, "${workspace.artifact_path}/jar1.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[1].Libraries[0].Jar) require.Equal(t, "${workspace.file_path}/notebook2", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].NotebookTask.NotebookPath) require.Equal(t, "${workspace.artifact_path}/jar2.jar", b.Config.Resources.Jobs["test_job"].JobSettings.Tasks[2].Libraries[0].Jar) - } diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go index 0ca71e28e..7ffd782c2 100644 --- a/bundle/config/mutator/run_as.go +++ b/bundle/config/mutator/run_as.go @@ -12,8 +12,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" ) -type setRunAs struct { -} +type setRunAs struct{} // This mutator does two things: // @@ -30,7 +29,7 @@ func (m *setRunAs) Name() string { return "SetRunAs" } -func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser string, runAsUser string) diag.Diagnostics { +func reportRunAsNotSupported(resourceType string, location dyn.Location, currentUser, runAsUser string) diag.Diagnostics { return diag.Diagnostics{{ Summary: fmt.Sprintf("%s do not support a setting a run_as user that is different from the owner.\n"+ "Current identity: %s. Run as identity: %s.\n"+ diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 47ce2ad03..9e9f2dcfe 100644 --- a/bundle/config/mutator/set_variables.go +++ b/bundle/config/mutator/set_variables.go @@ -65,7 +65,6 @@ func setVariable(ctx context.Context, v dyn.Value, variable *variable.Variable, // We should have had a value to set for the variable at this point. return dyn.InvalidValue, fmt.Errorf(`no value assigned to required variable %s. Assignment can be done through the "--var" flag or by setting the %s environment variable`, name, bundleVarPrefix+name) - } func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diff --git a/bundle/config/mutator/sync_infer_root.go b/bundle/config/mutator/sync_infer_root.go index 512adcdbf..160fcc908 100644 --- a/bundle/config/mutator/sync_infer_root.go +++ b/bundle/config/mutator/sync_infer_root.go @@ -35,7 +35,7 @@ func (m *syncInferRoot) Name() string { // If the path does not exist, it returns an empty string. // // See "sync_infer_root_internal_test.go" for examples. -func (m *syncInferRoot) computeRoot(path string, root string) string { +func (m *syncInferRoot) computeRoot(path, root string) string { for !filepath.IsLocal(path) { // Break if we have reached the root of the filesystem. 
dir := filepath.Dir(root) diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index 5e016d8a1..af0f94120 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -275,8 +275,8 @@ func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) diag.Diagnos } func gatherFallbackPaths(v dyn.Value, typ string) (map[string]string, error) { - var fallback = make(map[string]string) - var pattern = dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey()) + fallback := make(map[string]string) + pattern := dyn.NewPattern(dyn.Key("resources"), dyn.Key(typ), dyn.AnyKey()) // Previous behavior was to use a resource's location as the base path to resolve // relative paths in its definition. With the introduction of [dyn.Value] throughout, diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index bf6ba15d8..493abb8c5 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -28,12 +28,13 @@ import ( func touchNotebookFile(t *testing.T, path string) { f, err := os.Create(path) require.NoError(t, err) - f.WriteString("# Databricks notebook source\n") + _, err = f.WriteString("# Databricks notebook source\n") + require.NoError(t, err) f.Close() } func touchEmptyFile(t *testing.T, path string) { - err := os.MkdirAll(filepath.Dir(path), 0700) + err := os.MkdirAll(filepath.Dir(path), 0o700) require.NoError(t, err) f, err := os.Create(path) require.NoError(t, err) diff --git a/bundle/config/mutator/verify_cli_version.go b/bundle/config/mutator/verify_cli_version.go index 279af44e6..873e4f780 100644 --- a/bundle/config/mutator/verify_cli_version.go +++ b/bundle/config/mutator/verify_cli_version.go @@ -15,8 +15,7 @@ func VerifyCliVersion() bundle.Mutator { return &verifyCliVersion{} } -type verifyCliVersion struct { -} +type verifyCliVersion struct{} func (v *verifyCliVersion) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { // No constraints specified, skip the check. diff --git a/bundle/config/presets.go b/bundle/config/presets.go index 30f56c0f8..252c5b5f7 100644 --- a/bundle/config/presets.go +++ b/bundle/config/presets.go @@ -1,7 +1,9 @@ package config -const Paused = "PAUSED" -const Unpaused = "UNPAUSED" +const ( + Paused = "PAUSED" + Unpaused = "UNPAUSED" +) type Presets struct { // NamePrefix to prepend to all resource names. 
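[Editor's note] Most of the mechanical churn in this diff comes from enabling gofumpt (with extra-rules: true) and the gofmt rewrite-rules added to .golangci.yaml. The following is a rough consolidated sketch of the recurring before/after patterns, not code from the repository; identifier names (autodetect, isLocalStateStale, Paused, seen) are borrowed from the hunks above, and which linter rule produces which rewrite is an approximation.

package main

import (
	"io"
	"os"
)

// Adjacent single-line consts are grouped into one block
// (was: two separate `const X = ...` declarations).
const (
	Paused   = "PAUSED"
	Unpaused = "UNPAUSED"
)

// An empty struct body is collapsed onto one line (was "struct {\n}").
type autodetect struct{}

// Adjacent parameters of the same type are merged
// (was "local io.Reader, remote io.Reader").
func isLocalStateStale(local, remote io.Reader) bool {
	_, _ = local, remote
	return false
}

func main() {
	// Short variable declaration replaces "var seen = map[string]bool{}".
	seen := map[string]bool{}
	seen["bundle.yml"] = true

	// Octal file modes use the 0o prefix (was 0700).
	if err := os.MkdirAll(".databricks", 0o700); err != nil {
		panic(err)
	}
}

The same config also adds the gofmt rewrite rule turning interface{} into any, which accounts for the remaining formatting-only hunks.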
diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 9ae73b22a..2d05acf3e 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -49,7 +49,8 @@ func TestCustomMarshallerIsImplemented(t *testing.T) { // Eg: resource.Job implements MarshalJSON v := reflect.Zero(vt.Elem()).Interface() assert.NotPanics(t, func() { - json.Marshal(v) + _, err := json.Marshal(v) + assert.NoError(t, err) }, "Resource %s does not have a custom marshaller", field.Name) // Unmarshalling a *resourceStruct will panic if the resource does not have a custom unmarshaller @@ -58,7 +59,8 @@ func TestCustomMarshallerIsImplemented(t *testing.T) { // Eg: *resource.Job implements UnmarshalJSON v = reflect.New(vt.Elem()).Interface() assert.NotPanics(t, func() { - json.Unmarshal([]byte("{}"), v) + err := json.Unmarshal([]byte("{}"), v) + assert.NoError(t, err) }, "Resource %s does not have a custom unmarshaller", field.Name) } } diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index a77f961bd..42fae49d9 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -100,7 +100,7 @@ func TestRootMergeTargetOverridesWithMode(t *testing.T) { }, }, } - root.initializeDynamicValue() + require.NoError(t, root.initializeDynamicValue()) require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, Development, root.Bundle.Mode) } @@ -156,7 +156,7 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) { }, }, } - root.initializeDynamicValue() + require.NoError(t, root.initializeDynamicValue()) require.NoError(t, root.MergeTargetOverrides("development")) assert.Equal(t, "bar", root.Variables["foo"].Default) assert.Equal(t, "foo var", root.Variables["foo"].Description) @@ -168,7 +168,6 @@ func TestRootMergeTargetOverridesWithVariables(t *testing.T) { "key1": "value1", }, root.Variables["complex"].Default) assert.Equal(t, "complex var", root.Variables["complex"].Description) - } func TestIsFullVariableOverrideDef(t *testing.T) { @@ -252,5 +251,4 @@ func TestIsFullVariableOverrideDef(t *testing.T) { for i, tc := range testCases { assert.Equal(t, tc.expected, isFullVariableOverrideDef(tc.value), "test case %d", i) } - } diff --git a/bundle/config/validate/files_to_sync.go b/bundle/config/validate/files_to_sync.go index a14278482..b4de06773 100644 --- a/bundle/config/validate/files_to_sync.go +++ b/bundle/config/validate/files_to_sync.go @@ -13,8 +13,7 @@ func FilesToSync() bundle.ReadOnlyMutator { return &filesToSync{} } -type filesToSync struct { -} +type filesToSync struct{} func (v *filesToSync) Name() string { return "validate:files_to_sync" diff --git a/bundle/config/validate/files_to_sync_test.go b/bundle/config/validate/files_to_sync_test.go index 30af9026d..d6a1ed59a 100644 --- a/bundle/config/validate/files_to_sync_test.go +++ b/bundle/config/validate/files_to_sync_test.go @@ -2,6 +2,7 @@ package validate import ( "context" + "path/filepath" "testing" "github.com/databricks/cli/bundle" @@ -81,7 +82,7 @@ func TestFilesToSync_EverythingIgnored(t *testing.T) { b := setupBundleForFilesToSyncTest(t) // Ignore all files. 
- testutil.WriteFile(t, "*\n.*\n", b.BundleRootPath, ".gitignore") + testutil.WriteFile(t, filepath.Join(b.BundleRootPath, ".gitignore"), "*\n.*\n") ctx := context.Background() rb := bundle.ReadOnly(b) diff --git a/bundle/config/validate/folder_permissions.go b/bundle/config/validate/folder_permissions.go index 505e82a1e..aa89a0551 100644 --- a/bundle/config/validate/folder_permissions.go +++ b/bundle/config/validate/folder_permissions.go @@ -15,8 +15,7 @@ import ( "golang.org/x/sync/errgroup" ) -type folderPermissions struct { -} +type folderPermissions struct{} // Apply implements bundle.ReadOnlyMutator. func (f *folderPermissions) Apply(ctx context.Context, b bundle.ReadOnlyBundle) diag.Diagnostics { diff --git a/bundle/config/validate/job_cluster_key_defined.go b/bundle/config/validate/job_cluster_key_defined.go index 368c3edb1..c3a1ab3df 100644 --- a/bundle/config/validate/job_cluster_key_defined.go +++ b/bundle/config/validate/job_cluster_key_defined.go @@ -13,8 +13,7 @@ func JobClusterKeyDefined() bundle.ReadOnlyMutator { return &jobClusterKeyDefined{} } -type jobClusterKeyDefined struct { -} +type jobClusterKeyDefined struct{} func (v *jobClusterKeyDefined) Name() string { return "validate:job_cluster_key_defined" diff --git a/bundle/config/validate/job_task_cluster_spec.go b/bundle/config/validate/job_task_cluster_spec.go index b80befcdf..5f532acfe 100644 --- a/bundle/config/validate/job_task_cluster_spec.go +++ b/bundle/config/validate/job_task_cluster_spec.go @@ -17,8 +17,7 @@ func JobTaskClusterSpec() bundle.ReadOnlyMutator { return &jobTaskClusterSpec{} } -type jobTaskClusterSpec struct { -} +type jobTaskClusterSpec struct{} func (v *jobTaskClusterSpec) Name() string { return "validate:job_task_cluster_spec" diff --git a/bundle/config/validate/single_node_cluster_test.go b/bundle/config/validate/single_node_cluster_test.go index 18771cc00..c3ead8ef6 100644 --- a/bundle/config/validate/single_node_cluster_test.go +++ b/bundle/config/validate/single_node_cluster_test.go @@ -175,7 +175,6 @@ func TestValidateSingleNodeClusterFailForJobClusters(t *testing.T) { Paths: []dyn.Path{dyn.MustPathFromString("resources.jobs.foo.job_clusters[0].new_cluster")}, }, }, diags) - }) } } diff --git a/bundle/config/validate/validate.go b/bundle/config/validate/validate.go index eb4c3c3cd..131566fc9 100644 --- a/bundle/config/validate/validate.go +++ b/bundle/config/validate/validate.go @@ -8,8 +8,7 @@ import ( "github.com/databricks/cli/libs/dyn" ) -type validate struct { -} +type validate struct{} type location struct { path string diff --git a/bundle/config/validate/validate_sync_patterns.go b/bundle/config/validate/validate_sync_patterns.go index 52f06835c..f5787a81d 100644 --- a/bundle/config/validate/validate_sync_patterns.go +++ b/bundle/config/validate/validate_sync_patterns.go @@ -17,8 +17,7 @@ func ValidateSyncPatterns() bundle.ReadOnlyMutator { return &validateSyncPatterns{} } -type validateSyncPatterns struct { -} +type validateSyncPatterns struct{} func (v *validateSyncPatterns) Name() string { return "validate:validate_sync_patterns" diff --git a/bundle/config/variable/lookup_test.go b/bundle/config/variable/lookup_test.go index a84748751..bd54d89fc 100644 --- a/bundle/config/variable/lookup_test.go +++ b/bundle/config/variable/lookup_test.go @@ -42,7 +42,6 @@ func TestLookup_Empty(t *testing.T) { // No string representation for an invalid lookup assert.Empty(t, lookup.String()) - } func TestLookup_Multiple(t *testing.T) { diff --git a/bundle/config/variable/resolve_cluster.go 
b/bundle/config/variable/resolve_cluster.go index 2d68b7fb7..a8cf3fe7f 100644 --- a/bundle/config/variable/resolve_cluster.go +++ b/bundle/config/variable/resolve_cluster.go @@ -20,7 +20,6 @@ func (l resolveCluster) Resolve(ctx context.Context, w *databricks.WorkspaceClie ClusterSources: []compute.ClusterSource{compute.ClusterSourceApi, compute.ClusterSourceUi}, }, }) - if err != nil { return "", err } diff --git a/bundle/config/workspace_test.go b/bundle/config/workspace_test.go index 3ef963253..384cc0a2c 100644 --- a/bundle/config/workspace_test.go +++ b/bundle/config/workspace_test.go @@ -11,6 +11,7 @@ import ( "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func setupWorkspaceTest(t *testing.T) string { @@ -42,11 +43,12 @@ func TestWorkspaceResolveProfileFromHost(t *testing.T) { setupWorkspaceTest(t) // This works if there is a config file with a matching profile. - databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ Profile: "default", Host: "https://abc.cloud.databricks.com", Token: "123", }) + require.NoError(t, err) client, err := w.Client() assert.NoError(t, err) @@ -57,12 +59,13 @@ func TestWorkspaceResolveProfileFromHost(t *testing.T) { home := setupWorkspaceTest(t) // This works if there is a config file with a matching profile. - databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ ConfigFile: filepath.Join(home, "customcfg"), Profile: "custom", Host: "https://abc.cloud.databricks.com", Token: "123", }) + require.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", filepath.Join(home, "customcfg")) client, err := w.Client() @@ -90,12 +93,13 @@ func TestWorkspaceVerifyProfileForHost(t *testing.T) { setupWorkspaceTest(t) // This works if there is a config file with a matching profile. - databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ Profile: "abc", Host: "https://abc.cloud.databricks.com", }) + require.NoError(t, err) - _, err := w.Client() + _, err = w.Client() assert.NoError(t, err) }) @@ -103,12 +107,13 @@ func TestWorkspaceVerifyProfileForHost(t *testing.T) { setupWorkspaceTest(t) // This works if there is a config file with a matching profile. - databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ Profile: "abc", Host: "https://def.cloud.databricks.com", }) + require.NoError(t, err) - _, err := w.Client() + _, err = w.Client() assert.ErrorContains(t, err, "config host mismatch") }) @@ -116,14 +121,15 @@ func TestWorkspaceVerifyProfileForHost(t *testing.T) { home := setupWorkspaceTest(t) // This works if there is a config file with a matching profile. 
- databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ ConfigFile: filepath.Join(home, "customcfg"), Profile: "abc", Host: "https://abc.cloud.databricks.com", }) + require.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", filepath.Join(home, "customcfg")) - _, err := w.Client() + _, err = w.Client() assert.NoError(t, err) }) @@ -131,14 +137,15 @@ func TestWorkspaceVerifyProfileForHost(t *testing.T) { home := setupWorkspaceTest(t) // This works if there is a config file with a matching profile. - databrickscfg.SaveToProfile(context.Background(), &config.Config{ + err := databrickscfg.SaveToProfile(context.Background(), &config.Config{ ConfigFile: filepath.Join(home, "customcfg"), Profile: "abc", Host: "https://def.cloud.databricks.com", }) + require.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", filepath.Join(home, "customcfg")) - _, err := w.Client() + _, err = w.Client() assert.ErrorContains(t, err, "config host mismatch") }) } diff --git a/bundle/deferred.go b/bundle/deferred.go index 56c2bdca2..e7e0c2aeb 100644 --- a/bundle/deferred.go +++ b/bundle/deferred.go @@ -15,7 +15,7 @@ func (d *DeferredMutator) Name() string { return "deferred" } -func Defer(mutator Mutator, finally Mutator) Mutator { +func Defer(mutator, finally Mutator) Mutator { return &DeferredMutator{ mutator: mutator, finally: finally, diff --git a/bundle/deferred_test.go b/bundle/deferred_test.go index 3abc4aa10..ea3df17c4 100644 --- a/bundle/deferred_test.go +++ b/bundle/deferred_test.go @@ -19,7 +19,7 @@ func (t *mutatorWithError) Name() string { func (t *mutatorWithError) Apply(_ context.Context, b *Bundle) diag.Diagnostics { t.applyCalled++ - return diag.Errorf(t.errorMsg) + return diag.Errorf(t.errorMsg) // nolint:govet } func TestDeferredMutatorWhenAllMutatorsSucceed(t *testing.T) { diff --git a/bundle/deploy/state.go b/bundle/deploy/state.go index 4f2bc4ee4..a131ab9c3 100644 --- a/bundle/deploy/state.go +++ b/bundle/deploy/state.go @@ -15,8 +15,10 @@ import ( "github.com/google/uuid" ) -const DeploymentStateFileName = "deployment.json" -const DeploymentStateVersion = 1 +const ( + DeploymentStateFileName = "deployment.json" + DeploymentStateVersion = 1 +) type File struct { LocalPath string `json:"local_path"` @@ -132,7 +134,7 @@ func (f Filelist) ToSlice(root vfs.Path) []fileset.File { return files } -func isLocalStateStale(local io.Reader, remote io.Reader) bool { +func isLocalStateStale(local, remote io.Reader) bool { localState, err := loadState(local) if err != nil { return true diff --git a/bundle/deploy/state_pull.go b/bundle/deploy/state_pull.go index 5e301a6f3..8fffca073 100644 --- a/bundle/deploy/state_pull.go +++ b/bundle/deploy/state_pull.go @@ -44,7 +44,7 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic return diag.FromErr(err) } - local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0600) + local, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR, 0o600) if err != nil { return diag.FromErr(err) } @@ -62,8 +62,14 @@ func (s *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic } // Truncating the file before writing - local.Truncate(0) - local.Seek(0, 0) + err = local.Truncate(0) + if err != nil { + return diag.FromErr(err) + } + _, err = local.Seek(0, 0) + if err != nil { + return diag.FromErr(err) + } // Write file to disk. 
log.Infof(ctx, "Writing remote deployment state file to local cache directory") diff --git a/bundle/deploy/state_pull_test.go b/bundle/deploy/state_pull_test.go index 42701eb26..36c49fb01 100644 --- a/bundle/deploy/state_pull_test.go +++ b/bundle/deploy/state_pull_test.go @@ -99,7 +99,7 @@ func testStatePull(t *testing.T, opts statePullOpts) { snapshotPath, err := sync.SnapshotPath(opts) require.NoError(t, err) - err = os.WriteFile(snapshotPath, []byte("snapshot"), 0644) + err = os.WriteFile(snapshotPath, []byte("snapshot"), 0o644) require.NoError(t, err) } @@ -110,7 +110,7 @@ func testStatePull(t *testing.T, opts statePullOpts) { data, err := json.Marshal(opts.localState) require.NoError(t, err) - err = os.WriteFile(statePath, data, 0644) + err = os.WriteFile(statePath, data, 0o644) require.NoError(t, err) } diff --git a/bundle/deploy/state_push_test.go b/bundle/deploy/state_push_test.go index 038b75341..3562ec147 100644 --- a/bundle/deploy/state_push_test.go +++ b/bundle/deploy/state_push_test.go @@ -74,7 +74,7 @@ func TestStatePush(t *testing.T) { data, err := json.Marshal(state) require.NoError(t, err) - err = os.WriteFile(statePath, data, 0644) + err = os.WriteFile(statePath, data, 0o644) require.NoError(t, err) diags := bundle.Apply(ctx, b, s) diff --git a/bundle/deploy/state_update.go b/bundle/deploy/state_update.go index 9ab1bacf1..5488d50ed 100644 --- a/bundle/deploy/state_update.go +++ b/bundle/deploy/state_update.go @@ -17,8 +17,7 @@ import ( "github.com/google/uuid" ) -type stateUpdate struct { -} +type stateUpdate struct{} func (s *stateUpdate) Name() string { return "deploy:state-update" @@ -57,7 +56,7 @@ func (s *stateUpdate) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnost return diag.FromErr(err) } // Write the state back to the file. 
- f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0600) + f, err := os.OpenFile(statePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0o600) if err != nil { log.Infof(ctx, "Unable to open deployment state file: %s", err) return diag.FromErr(err) diff --git a/bundle/deploy/state_update_test.go b/bundle/deploy/state_update_test.go index 1f5010b52..e561f534e 100644 --- a/bundle/deploy/state_update_test.go +++ b/bundle/deploy/state_update_test.go @@ -119,7 +119,7 @@ func TestStateUpdateWithExistingState(t *testing.T) { data, err := json.Marshal(state) require.NoError(t, err) - err = os.WriteFile(statePath, data, 0644) + err = os.WriteFile(statePath, data, 0o644) require.NoError(t, err) diags := bundle.Apply(ctx, b, s) diff --git a/bundle/deploy/terraform/check_dashboards_modified_remotely.go b/bundle/deploy/terraform/check_dashboards_modified_remotely.go index c884bcb9b..f263e8a7f 100644 --- a/bundle/deploy/terraform/check_dashboards_modified_remotely.go +++ b/bundle/deploy/terraform/check_dashboards_modified_remotely.go @@ -42,8 +42,7 @@ func collectDashboardsFromState(ctx context.Context, b *bundle.Bundle) ([]dashbo return dashboards, nil } -type checkDashboardsModifiedRemotely struct { -} +type checkDashboardsModifiedRemotely struct{} func (l *checkDashboardsModifiedRemotely) Name() string { return "CheckDashboardsModifiedRemotely" diff --git a/bundle/deploy/terraform/check_dashboards_modified_remotely_test.go b/bundle/deploy/terraform/check_dashboards_modified_remotely_test.go index 25aee125f..1bed3b1be 100644 --- a/bundle/deploy/terraform/check_dashboards_modified_remotely_test.go +++ b/bundle/deploy/terraform/check_dashboards_modified_remotely_test.go @@ -139,7 +139,7 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle require.NoError(t, err) // Write fake state file. 
- testutil.WriteFile(t, ` + testutil.WriteFile(t, filepath.Join(tfDir, TerraformStateFileName), ` { "version": 4, "terraform_version": "1.5.5", @@ -187,5 +187,5 @@ func writeFakeDashboardState(t *testing.T, ctx context.Context, b *bundle.Bundle } ] } - `, filepath.Join(tfDir, TerraformStateFileName)) + `) } diff --git a/bundle/deploy/terraform/check_running_resources.go b/bundle/deploy/terraform/check_running_resources.go index 737f773e5..5b3a70408 100644 --- a/bundle/deploy/terraform/check_running_resources.go +++ b/bundle/deploy/terraform/check_running_resources.go @@ -23,8 +23,7 @@ func (e ErrResourceIsRunning) Error() string { return fmt.Sprintf("%s %s is running", e.resourceType, e.resourceId) } -type checkRunningResources struct { -} +type checkRunningResources struct{} func (l *checkRunningResources) Name() string { return "check-running-resources" diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 076d9b7a0..61f26c088 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -43,7 +43,7 @@ func convertToResourceStruct[T any](t *testing.T, resource *T, data any) { } func TestBundleToTerraformJob(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ JobSettings: &jobs.JobSettings{ Name: "my job", JobClusters: []jobs.JobCluster{ @@ -71,7 +71,7 @@ func TestBundleToTerraformJob(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my_job": &src, @@ -93,7 +93,7 @@ func TestBundleToTerraformJob(t *testing.T) { } func TestBundleToTerraformJobPermissions(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ Permissions: []resources.Permission{ { Level: "CAN_VIEW", @@ -102,7 +102,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my_job": &src, @@ -121,7 +121,7 @@ func TestBundleToTerraformJobPermissions(t *testing.T) { } func TestBundleToTerraformJobTaskLibraries(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ JobSettings: &jobs.JobSettings{ Name: "my job", Tasks: []jobs.Task{ @@ -139,7 +139,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my_job": &src, @@ -158,7 +158,7 @@ func TestBundleToTerraformJobTaskLibraries(t *testing.T) { } func TestBundleToTerraformForEachTaskLibraries(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ JobSettings: &jobs.JobSettings{ Name: "my job", Tasks: []jobs.Task{ @@ -182,7 +182,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my_job": &src, @@ -201,7 +201,7 @@ func TestBundleToTerraformForEachTaskLibraries(t *testing.T) { } func TestBundleToTerraformPipeline(t *testing.T) { - var src = resources.Pipeline{ + src := resources.Pipeline{ PipelineSpec: &pipelines.PipelineSpec{ Name: "my pipeline", Libraries: []pipelines.PipelineLibrary{ @@ -239,7 +239,7 @@ func TestBundleToTerraformPipeline(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "my_pipeline": &src, @@ -262,7 +262,7 @@ func TestBundleToTerraformPipeline(t *testing.T) { } 
func TestBundleToTerraformPipelinePermissions(t *testing.T) { - var src = resources.Pipeline{ + src := resources.Pipeline{ Permissions: []resources.Permission{ { Level: "CAN_VIEW", @@ -271,7 +271,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "my_pipeline": &src, @@ -290,7 +290,7 @@ func TestBundleToTerraformPipelinePermissions(t *testing.T) { } func TestBundleToTerraformModel(t *testing.T) { - var src = resources.MlflowModel{ + src := resources.MlflowModel{ Model: &ml.Model{ Name: "name", Description: "description", @@ -307,7 +307,7 @@ func TestBundleToTerraformModel(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Models: map[string]*resources.MlflowModel{ "my_model": &src, @@ -330,7 +330,7 @@ func TestBundleToTerraformModel(t *testing.T) { } func TestBundleToTerraformModelPermissions(t *testing.T) { - var src = resources.MlflowModel{ + src := resources.MlflowModel{ Model: &ml.Model{ Name: "name", }, @@ -342,7 +342,7 @@ func TestBundleToTerraformModelPermissions(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Models: map[string]*resources.MlflowModel{ "my_model": &src, @@ -361,13 +361,13 @@ func TestBundleToTerraformModelPermissions(t *testing.T) { } func TestBundleToTerraformExperiment(t *testing.T) { - var src = resources.MlflowExperiment{ + src := resources.MlflowExperiment{ Experiment: &ml.Experiment{ Name: "name", }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Experiments: map[string]*resources.MlflowExperiment{ "my_experiment": &src, @@ -384,7 +384,7 @@ func TestBundleToTerraformExperiment(t *testing.T) { } func TestBundleToTerraformExperimentPermissions(t *testing.T) { - var src = resources.MlflowExperiment{ + src := resources.MlflowExperiment{ Experiment: &ml.Experiment{ Name: "name", }, @@ -396,7 +396,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Experiments: map[string]*resources.MlflowExperiment{ "my_experiment": &src, @@ -415,7 +415,7 @@ func TestBundleToTerraformExperimentPermissions(t *testing.T) { } func TestBundleToTerraformModelServing(t *testing.T) { - var src = resources.ModelServingEndpoint{ + src := resources.ModelServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{ Name: "name", Config: serving.EndpointCoreConfigInput{ @@ -439,7 +439,7 @@ func TestBundleToTerraformModelServing(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ "my_model_serving_endpoint": &src, @@ -462,7 +462,7 @@ func TestBundleToTerraformModelServing(t *testing.T) { } func TestBundleToTerraformModelServingPermissions(t *testing.T) { - var src = resources.ModelServingEndpoint{ + src := resources.ModelServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{ Name: "name", @@ -492,7 +492,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ "my_model_serving_endpoint": &src, @@ -511,7 +511,7 @@ func TestBundleToTerraformModelServingPermissions(t *testing.T) { } func 
TestBundleToTerraformRegisteredModel(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{ Name: "name", CatalogName: "catalog", @@ -520,7 +520,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ RegisteredModels: map[string]*resources.RegisteredModel{ "my_registered_model": &src, @@ -540,7 +540,7 @@ func TestBundleToTerraformRegisteredModel(t *testing.T) { } func TestBundleToTerraformRegisteredModelGrants(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{ Name: "name", CatalogName: "catalog", @@ -554,7 +554,7 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) { }, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ RegisteredModels: map[string]*resources.RegisteredModel{ "my_registered_model": &src, @@ -573,14 +573,14 @@ func TestBundleToTerraformRegisteredModelGrants(t *testing.T) { } func TestBundleToTerraformDeletedResources(t *testing.T) { - var job1 = resources.Job{ + job1 := resources.Job{ JobSettings: &jobs.JobSettings{}, } - var job2 = resources.Job{ + job2 := resources.Job{ ModifiedStatus: resources.ModifiedStatusDeleted, JobSettings: &jobs.JobSettings{}, } - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "my_job1": &job1, @@ -601,10 +601,10 @@ func TestBundleToTerraformDeletedResources(t *testing.T) { } func TestTerraformToBundleEmptyLocalResources(t *testing.T) { - var config = config.Root{ + config := config.Root{ Resources: config.Resources{}, } - var tfState = resourcesState{ + tfState := resourcesState{ Resources: []stateResource{ { Type: "databricks_job", @@ -736,7 +736,7 @@ func TestTerraformToBundleEmptyLocalResources(t *testing.T) { } func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "test_job": { @@ -817,7 +817,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { }, }, } - var tfState = resourcesState{ + tfState := resourcesState{ Resources: nil, } err := TerraformToBundle(&tfState, &config) @@ -860,7 +860,7 @@ func TestTerraformToBundleEmptyRemoteResources(t *testing.T) { } func TestTerraformToBundleModifiedResources(t *testing.T) { - var config = config.Root{ + config := config.Root{ Resources: config.Resources{ Jobs: map[string]*resources.Job{ "test_job": { @@ -996,7 +996,7 @@ func TestTerraformToBundleModifiedResources(t *testing.T) { }, }, } - var tfState = resourcesState{ + tfState := resourcesState{ Resources: []stateResource{ { Type: "databricks_job", diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 7d75ee8a8..366f0be8c 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -145,7 +145,7 @@ func inheritEnvVars(ctx context.Context, environ map[string]string) error { // This function is used for env vars set by the Databricks VSCode extension. The variables are intended to be used by the CLI // bundled with the Databricks VSCode extension, but users can use different CLI versions in the VSCode terminals, in which case we want to ignore // the variables if that CLI uses different versions of the dependencies. 
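// A minimal sketch, not the CLI's actual implementation, of the lookup the
// comment above describes: the env var is honored only when its companion
// version variable matches the version this CLI was built against. The names
// mirror the signature below; the sketch helper name itself is hypothetical.
func getEnvVarWithMatchingVersionSketch(ctx context.Context, envVarName, versionVarName, currentVersion string) (string, error) {
	envValue := env.Get(ctx, envVarName)
	versionValue := env.Get(ctx, versionVarName)
	// An unset or mismatched version means "do not inherit this variable".
	if envValue == "" || versionValue == "" || versionValue != currentVersion {
		return "", nil
	}
	return envValue, nil
}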
-func getEnvVarWithMatchingVersion(ctx context.Context, envVarName string, versionVarName string, currentVersion string) (string, error) { +func getEnvVarWithMatchingVersion(ctx context.Context, envVarName, versionVarName, currentVersion string) (string, error) { envValue := env.Get(ctx, envVarName) versionValue := env.Get(ctx, versionVarName) diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index e3621c6c3..a1ffc5a1a 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -400,7 +400,7 @@ func TestFindExecPathFromEnvironmentWithCorrectVersionAndBinary(t *testing.T) { require.Equal(t, tmpBinPath, b.Config.Bundle.Terraform.ExecPath) } -func createTempFile(t *testing.T, dest string, name string, executable bool) string { +func createTempFile(t *testing.T, dest, name string, executable bool) string { binPath := filepath.Join(dest, name) f, err := os.Create(binPath) require.NoError(t, err) @@ -409,7 +409,7 @@ func createTempFile(t *testing.T, dest, name string, executable bool) string { require.NoError(t, err) }() if executable { - err = f.Chmod(0777) + err = f.Chmod(0o777) require.NoError(t, err) } return binPath @@ -422,7 +422,7 @@ func TestGetEnvVarWithMatchingVersion(t *testing.T) { tmp := t.TempDir() file := testutil.Touch(t, tmp, "bar") - var tc = []struct { + tc := []struct { envValue string versionValue string currentVersion string diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index 9c2126aec..813e6bbb7 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -10,8 +10,7 @@ import ( "github.com/databricks/cli/libs/dyn/dynvar" ) -type interpolateMutator struct { -} +type interpolateMutator struct{} func Interpolate() bundle.Mutator { return &interpolateMutator{} diff --git a/bundle/deploy/terraform/pkg.go b/bundle/deploy/terraform/pkg.go index cad754024..bd636639f 100644 --- a/bundle/deploy/terraform/pkg.go +++ b/bundle/deploy/terraform/pkg.go @@ -5,15 +5,19 @@ import ( "github.com/hashicorp/go-version" ) -const TerraformStateFileName = "terraform.tfstate" -const TerraformConfigFileName = "bundle.tf.json" +const ( + TerraformStateFileName = "terraform.tfstate" + TerraformConfigFileName = "bundle.tf.json" +) // Users can provide their own terraform binary and databricks terraform provider by setting the following environment variables. // This allows users to use the CLI in air-gapped environments. See the `debug terraform` command. -const TerraformExecPathEnv = "DATABRICKS_TF_EXEC_PATH" -const TerraformVersionEnv = "DATABRICKS_TF_VERSION" -const TerraformCliConfigPathEnv = "DATABRICKS_TF_CLI_CONFIG_FILE" -const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION" +const ( + TerraformExecPathEnv = "DATABRICKS_TF_EXEC_PATH" + TerraformVersionEnv = "DATABRICKS_TF_VERSION" + TerraformCliConfigPathEnv = "DATABRICKS_TF_CLI_CONFIG_FILE" + TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION" +) // Terraform CLI version to use and the corresponding checksums for it. The // checksums are used to verify the integrity of the downloaded binary. Please @@ -26,8 +30,10 @@ const TerraformProviderVersionEnv = "DATABRICKS_TF_PROVIDER_VERSION" // downloaded Terraform archive.
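// A hypothetical helper, included only to illustrate how these checksums are
// meant to be checked against the downloaded archive; compare the
// downloadAndChecksum test helper in pkg_test.go below. Assumes imports of
// "crypto/sha256", "encoding/hex", and "fmt".
func verifySHA256(archive []byte, expectedHex string) error {
	sum := sha256.Sum256(archive)
	if hex.EncodeToString(sum[:]) != expectedHex {
		return fmt.Errorf("checksum mismatch for downloaded Terraform archive")
	}
	return nil
}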
var TerraformVersion = version.Must(version.NewVersion("1.5.5")) -const checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2" -const checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a" +const ( + checksumLinuxArm64 = "b055aefe343d0b710d8a7afd31aeb702b37bbf4493bb9385a709991e48dfbcd2" + checksumLinuxAmd64 = "ad0c696c870c8525357b5127680cd79c0bdf58179af9acd091d43b1d6482da4a" +) type Checksum struct { LinuxArm64 string `json:"linux_arm64"` diff --git a/bundle/deploy/terraform/pkg_test.go b/bundle/deploy/terraform/pkg_test.go index b8dcb9e08..08ec3de75 100644 --- a/bundle/deploy/terraform/pkg_test.go +++ b/bundle/deploy/terraform/pkg_test.go @@ -14,7 +14,7 @@ import ( "github.com/stretchr/testify/require" ) -func downloadAndChecksum(t *testing.T, url string, expectedChecksum string) { +func downloadAndChecksum(t *testing.T, url, expectedChecksum string) { resp, err := http.Get(url) require.NoError(t, err) defer resp.Body.Close() diff --git a/bundle/deploy/terraform/plan.go b/bundle/deploy/terraform/plan.go index 72f0b49a8..7f7473efa 100644 --- a/bundle/deploy/terraform/plan.go +++ b/bundle/deploy/terraform/plan.go @@ -2,7 +2,6 @@ package terraform import ( "context" - "fmt" "path/filepath" "github.com/databricks/cli/bundle" @@ -57,7 +56,7 @@ func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { IsEmpty: !notEmpty, } - log.Debugf(ctx, fmt.Sprintf("Planning complete and persisted at %s\n", planPath)) + log.Debugf(ctx, "Planning complete and persisted at %s\n", planPath) return nil } diff --git a/bundle/deploy/terraform/state_pull.go b/bundle/deploy/terraform/state_pull.go index 9a5b91007..4e1e2b1c5 100644 --- a/bundle/deploy/terraform/state_pull.go +++ b/bundle/deploy/terraform/state_pull.go @@ -104,7 +104,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic localState, err := l.localState(ctx, b) if errors.Is(err, fs.ErrNotExist) { log.Infof(ctx, "Local state file does not exist. Using remote Terraform state.") - err := os.WriteFile(localStatePath, remoteContent, 0600) + err := os.WriteFile(localStatePath, remoteContent, 0o600) return diag.FromErr(err) } if err != nil { @@ -114,14 +114,14 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostic // If the lineage does not match, the Terraform state files do not correspond to the same deployment. if localState.Lineage != remoteState.Lineage { log.Infof(ctx, "Remote and local state lineages do not match. Using remote Terraform state. Invalidating local Terraform state.") - err := os.WriteFile(localStatePath, remoteContent, 0600) + err := os.WriteFile(localStatePath, remoteContent, 0o600) return diag.FromErr(err) } // If the remote state is newer than the local state, we should use the remote state. if remoteState.Serial > localState.Serial { log.Infof(ctx, "Remote state is newer than local state. 
Using remote Terraform state.") - err := os.WriteFile(localStatePath, remoteContent, 0600) + err := os.WriteFile(localStatePath, remoteContent, 0o600) return diag.FromErr(err) } diff --git a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go index e6d2620c6..330720a7c 100644 --- a/bundle/deploy/terraform/tfdyn/convert_cluster_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_cluster_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertCluster(t *testing.T) { - var src = resources.Cluster{ + src := resources.Cluster{ ClusterSpec: &compute.ClusterSpec{ NumWorkers: 3, SparkVersion: "13.3.x-scala2.12", @@ -93,5 +93,4 @@ func TestConvertCluster(t *testing.T) { }, }, }, out.Permissions["cluster_my_cluster"]) - } diff --git a/bundle/deploy/terraform/tfdyn/convert_dashboard.go b/bundle/deploy/terraform/tfdyn/convert_dashboard.go index 3ba7e19a2..3ec8b489f 100644 --- a/bundle/deploy/terraform/tfdyn/convert_dashboard.go +++ b/bundle/deploy/terraform/tfdyn/convert_dashboard.go @@ -17,7 +17,7 @@ const ( ) // Marshal "serialized_dashboard" as JSON if it is set in the input but not in the output. -func marshalSerializedDashboard(vin dyn.Value, vout dyn.Value) (dyn.Value, error) { +func marshalSerializedDashboard(vin, vout dyn.Value) (dyn.Value, error) { // Skip if the "serialized_dashboard" field is already set. if v := vout.Get(serializedDashboardFieldName); v.IsValid() { return vout, nil diff --git a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go index 539ba21aa..6f5d36504 100644 --- a/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_dashboard_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertDashboard(t *testing.T) { - var src = resources.Dashboard{ + src := resources.Dashboard{ Dashboard: &dashboards.Dashboard{ DisplayName: "my dashboard", WarehouseId: "f00dcafe", @@ -60,7 +60,7 @@ func TestConvertDashboard(t *testing.T) { } func TestConvertDashboardFilePath(t *testing.T) { - var src = resources.Dashboard{ + src := resources.Dashboard{ FilePath: "some/path", } @@ -84,7 +84,7 @@ func TestConvertDashboardFilePath(t *testing.T) { } func TestConvertDashboardFilePathQuoted(t *testing.T) { - var src = resources.Dashboard{ + src := resources.Dashboard{ FilePath: `C:\foo\bar\baz\dashboard.lvdash.json`, } @@ -108,7 +108,7 @@ func TestConvertDashboardFilePathQuoted(t *testing.T) { } func TestConvertDashboardSerializedDashboardString(t *testing.T) { - var src = resources.Dashboard{ + src := resources.Dashboard{ SerializedDashboard: `{ "json": true }`, } @@ -127,7 +127,7 @@ func TestConvertDashboardSerializedDashboardString(t *testing.T) { } func TestConvertDashboardSerializedDashboardAny(t *testing.T) { - var src = resources.Dashboard{ + src := resources.Dashboard{ SerializedDashboard: map[string]any{ "pages": []map[string]any{ { diff --git a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go index 63add4368..3ef3963f2 100644 --- a/bundle/deploy/terraform/tfdyn/convert_experiment_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_experiment_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertExperiment(t *testing.T) { - var src = resources.MlflowExperiment{ + src := resources.MlflowExperiment{ Experiment: &ml.Experiment{ Name: "name", }, diff --git a/bundle/deploy/terraform/tfdyn/convert_grants_test.go b/bundle/deploy/terraform/tfdyn/convert_grants_test.go index 
a486bc36f..0a263b493 100644 --- a/bundle/deploy/terraform/tfdyn/convert_grants_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_grants_test.go @@ -13,7 +13,7 @@ import ( ) func TestConvertGrants(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ Grants: []resources.Grant{ { Privileges: []string{"EXECUTE", "FOO"}, @@ -45,7 +45,7 @@ func TestConvertGrants(t *testing.T) { } func TestConvertGrantsNil(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ Grants: nil, } @@ -58,7 +58,7 @@ func TestConvertGrantsNil(t *testing.T) { } func TestConvertGrantsEmpty(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ Grants: []resources.Grant{}, } diff --git a/bundle/deploy/terraform/tfdyn/convert_job.go b/bundle/deploy/terraform/tfdyn/convert_job.go index 8948e3baf..bb2f8cd0f 100644 --- a/bundle/deploy/terraform/tfdyn/convert_job.go +++ b/bundle/deploy/terraform/tfdyn/convert_job.go @@ -83,7 +83,6 @@ func convertJobResource(ctx context.Context, vin dyn.Value) (dyn.Value, error) { "libraries": "library", }) }) - if err != nil { return dyn.InvalidValue, err } diff --git a/bundle/deploy/terraform/tfdyn/convert_job_test.go b/bundle/deploy/terraform/tfdyn/convert_job_test.go index 695b9ba24..c73e530d4 100644 --- a/bundle/deploy/terraform/tfdyn/convert_job_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_job_test.go @@ -15,7 +15,7 @@ import ( ) func TestConvertJob(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ JobSettings: &jobs.JobSettings{ Name: "my job", JobClusters: []jobs.JobCluster{ diff --git a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go index 63b75e9ab..d46350bb7 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_serving_endpoint_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertModelServingEndpoint(t *testing.T) { - var src = resources.ModelServingEndpoint{ + src := resources.ModelServingEndpoint{ CreateServingEndpoint: &serving.CreateServingEndpoint{ Name: "name", Config: serving.EndpointCoreConfigInput{ diff --git a/bundle/deploy/terraform/tfdyn/convert_model_test.go b/bundle/deploy/terraform/tfdyn/convert_model_test.go index 542caa878..4c4e62c5b 100644 --- a/bundle/deploy/terraform/tfdyn/convert_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_model_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertModel(t *testing.T) { - var src = resources.MlflowModel{ + src := resources.MlflowModel{ Model: &ml.Model{ Name: "name", Description: "description", diff --git a/bundle/deploy/terraform/tfdyn/convert_permissions_test.go b/bundle/deploy/terraform/tfdyn/convert_permissions_test.go index ba389020f..ba04384b5 100644 --- a/bundle/deploy/terraform/tfdyn/convert_permissions_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_permissions_test.go @@ -13,7 +13,7 @@ import ( ) func TestConvertPermissions(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ Permissions: []resources.Permission{ { Level: "CAN_VIEW", @@ -59,7 +59,7 @@ func TestConvertPermissions(t *testing.T) { } func TestConvertPermissionsNil(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ Permissions: nil, } @@ -72,7 +72,7 @@ func TestConvertPermissionsNil(t *testing.T) { } func TestConvertPermissionsEmpty(t *testing.T) { - var src = resources.Job{ + src := resources.Job{ Permissions: 
[]resources.Permission{}, } diff --git a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go index 7010d463a..0239bad18 100644 --- a/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_pipeline_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertPipeline(t *testing.T) { - var src = resources.Pipeline{ + src := resources.Pipeline{ PipelineSpec: &pipelines.PipelineSpec{ Name: "my pipeline", Libraries: []pipelines.PipelineLibrary{ diff --git a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go index f71abf43c..16b30de71 100644 --- a/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_quality_monitor_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertQualityMonitor(t *testing.T) { - var src = resources.QualityMonitor{ + src := resources.QualityMonitor{ TableName: "test_table_name", CreateMonitor: &catalog.CreateMonitor{ AssetsDir: "assets_dir", diff --git a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go index 77096e8d0..bf2a5ab64 100644 --- a/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_registered_model_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertRegisteredModel(t *testing.T) { - var src = resources.RegisteredModel{ + src := resources.RegisteredModel{ CreateRegisteredModelRequest: &catalog.CreateRegisteredModelRequest{ Name: "name", CatalogName: "catalog", diff --git a/bundle/deploy/terraform/tfdyn/convert_schema_test.go b/bundle/deploy/terraform/tfdyn/convert_schema_test.go index 2efbf3e43..12822bb3c 100644 --- a/bundle/deploy/terraform/tfdyn/convert_schema_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_schema_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertSchema(t *testing.T) { - var src = resources.Schema{ + src := resources.Schema{ CreateSchema: &catalog.CreateSchema{ Name: "name", CatalogName: "catalog", diff --git a/bundle/deploy/terraform/tfdyn/convert_volume_test.go b/bundle/deploy/terraform/tfdyn/convert_volume_test.go index c897ae69a..09b69489e 100644 --- a/bundle/deploy/terraform/tfdyn/convert_volume_test.go +++ b/bundle/deploy/terraform/tfdyn/convert_volume_test.go @@ -14,7 +14,7 @@ import ( ) func TestConvertVolume(t *testing.T) { - var src = resources.Volume{ + src := resources.Volume{ CreateVolumeRequestContent: &catalog.CreateVolumeRequestContent{ CatalogName: "catalog", Comment: "comment", diff --git a/bundle/deploy/terraform/tfdyn/rename_keys.go b/bundle/deploy/terraform/tfdyn/rename_keys.go index 650ffb890..95904575f 100644 --- a/bundle/deploy/terraform/tfdyn/rename_keys.go +++ b/bundle/deploy/terraform/tfdyn/rename_keys.go @@ -11,7 +11,7 @@ import ( // definition uses the plural name. This function can convert between the two. func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) { var err error - var acc = dyn.V(map[string]dyn.Value{}) + acc := dyn.V(map[string]dyn.Value{}) nv, err := dyn.Walk(v, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { if len(p) == 0 { @@ -36,7 +36,6 @@ func renameKeys(v dyn.Value, rename map[string]string) (dyn.Value, error) { // Pass through all other values. 
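// (Returning dyn.ErrSkip keeps this value as-is and stops the walk from
// descending into its children; it does not abort the walk itself.)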
return v, dyn.ErrSkip }) - if err != nil { return dyn.InvalidValue, err } diff --git a/bundle/deploy/terraform/unbind.go b/bundle/deploy/terraform/unbind.go index 49d65615e..494cb7ef1 100644 --- a/bundle/deploy/terraform/unbind.go +++ b/bundle/deploy/terraform/unbind.go @@ -37,6 +37,6 @@ func (*unbind) Name() string { return "terraform.Unbind" } -func Unbind(resourceType string, resourceKey string) bundle.Mutator { +func Unbind(resourceType, resourceKey string) bundle.Mutator { return &unbind{resourceType: resourceType, resourceKey: resourceKey} } diff --git a/bundle/internal/bundletest/location.go b/bundle/internal/bundletest/location.go index 2ffd621bf..5dcd9d78f 100644 --- a/bundle/internal/bundletest/location.go +++ b/bundle/internal/bundletest/location.go @@ -10,7 +10,7 @@ import ( // with the path it is loaded from. func SetLocation(b *bundle.Bundle, prefix string, locations []dyn.Location) { start := dyn.MustPathFromString(prefix) - b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { + err := b.Config.Mutate(func(root dyn.Value) (dyn.Value, error) { return dyn.Walk(root, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { // If the path has the given prefix, set the location. if p.HasPrefix(start) { @@ -27,4 +27,7 @@ func SetLocation(b *bundle.Bundle, prefix string, locations []dyn.Location) { return v, dyn.ErrSkip }) }) + if err != nil { + panic("Mutate() failed: " + err.Error()) + } } diff --git a/bundle/internal/schema/main.go b/bundle/internal/schema/main.go index 881ce3496..cc06f0bbe 100644 --- a/bundle/internal/schema/main.go +++ b/bundle/internal/schema/main.go @@ -48,7 +48,8 @@ func addInterpolationPatterns(typ reflect.Type, s jsonschema.Schema) jsonschema. { Type: jsonschema.StringType, Pattern: interpolationPattern("var"), - }}, + }, + }, } case jsonschema.IntegerType, jsonschema.NumberType, jsonschema.BooleanType: // primitives can have variable values, or references like ${bundle.xyz} @@ -149,7 +150,7 @@ func main() { } // Write the schema descriptions to the output file. - err = os.WriteFile(outputFile, b, 0644) + err = os.WriteFile(outputFile, b, 0o644) if err != nil { log.Fatal(err) } diff --git a/bundle/internal/tf/codegen/generator/walker.go b/bundle/internal/tf/codegen/generator/walker.go index 2ed044c3d..0e9d73c4e 100644 --- a/bundle/internal/tf/codegen/generator/walker.go +++ b/bundle/internal/tf/codegen/generator/walker.go @@ -2,9 +2,8 @@ package generator import ( "fmt" - "strings" - "slices" + "strings" tfjson "github.com/hashicorp/terraform-json" "github.com/iancoleman/strcase" @@ -70,6 +69,25 @@ func nestedBlockKeys(block *tfjson.SchemaBlock) []string { return keys } +func nestedField(name []string, k string, isRef bool) field { + // Collect field properties. + fieldName := strcase.ToCamel(k) + fieldTypePrefix := "" + if isRef { + fieldTypePrefix = "*" + } else { + fieldTypePrefix = "[]" + } + fieldType := fmt.Sprintf("%s%s", fieldTypePrefix, strings.Join(append(name, strcase.ToCamel(k)), "")) + fieldTag := fmt.Sprintf("%s,omitempty", k) + + return field{ + Name: fieldName, + Type: fieldType, + Tag: fieldTag, + } +} + func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error { // Produce nested types before this block itself. // This ensures types are defined before they are referenced. @@ -91,10 +109,24 @@ func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error { v := block.Attributes[k] // Assert the attribute type is always set. 
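// (An attribute may instead carry a nested object type via AttributeNestedType,
// so a nil AttributeType alone is no longer an error; the nested case is
// handled just below.)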
- if v.AttributeType == cty.NilType { + if v.AttributeType == cty.NilType && v.AttributeNestedType == nil { return fmt.Errorf("unexpected nil type for attribute %s", k) } + // If there is a nested type, walk it and continue to next attribute. + if v.AttributeNestedType != nil { + nestedBlock := &tfjson.SchemaBlock{ + Attributes: v.AttributeNestedType.Attributes, + } + err := w.walk(nestedBlock, append(name, strcase.ToCamel(k))) + if err != nil { + return err + } + // Append to list of fields for type. + typ.Fields = append(typ.Fields, nestedField(name, k, v.AttributeNestedType.NestingMode == tfjson.SchemaNestingModeSingle)) + continue + } + // Collect field properties. fieldName := strcase.ToCamel(k) fieldType := processAttributeType(v.AttributeType) @@ -117,24 +149,8 @@ func (w *walker) walk(block *tfjson.SchemaBlock, name []string) error { // Declare nested blocks. for _, k := range nestedBlockKeys(block) { v := block.NestedBlocks[k] - - // Collect field properties. - fieldName := strcase.ToCamel(k) - fieldTypePrefix := "" - if v.MaxItems == 1 { - fieldTypePrefix = "*" - } else { - fieldTypePrefix = "[]" - } - fieldType := fmt.Sprintf("%s%s", fieldTypePrefix, strings.Join(append(name, strcase.ToCamel(k)), "")) - fieldTag := fmt.Sprintf("%s,omitempty", k) - // Append to list of fields for type. - typ.Fields = append(typ.Fields, field{ - Name: fieldName, - Type: fieldType, - Tag: fieldTag, - }) + typ.Fields = append(typ.Fields, nestedField(name, k, v.MaxItems == 1)) } // Append type to list of structs. diff --git a/bundle/internal/tf/codegen/go.mod b/bundle/internal/tf/codegen/go.mod index 67ac4bbc7..6279003cc 100644 --- a/bundle/internal/tf/codegen/go.mod +++ b/bundle/internal/tf/codegen/go.mod @@ -1,24 +1,27 @@ module github.com/databricks/cli/bundle/internal/tf/codegen -go 1.21 +go 1.23 + +toolchain go1.23.2 require ( - github.com/hashicorp/go-version v1.6.0 - github.com/hashicorp/hc-install v0.6.3 - github.com/hashicorp/terraform-exec v0.20.0 - github.com/hashicorp/terraform-json v0.21.0 + github.com/hashicorp/go-version v1.7.0 + github.com/hashicorp/hc-install v0.9.0 + github.com/hashicorp/terraform-exec v0.21.0 + github.com/hashicorp/terraform-json v0.23.0 github.com/iancoleman/strcase v0.3.0 - github.com/zclconf/go-cty v1.14.2 - golang.org/x/exp v0.0.0-20240213143201-ec583247a57a + github.com/zclconf/go-cty v1.15.1 + golang.org/x/exp v0.0.0-20241204233417-43b7b7cde48d ) require ( - github.com/ProtonMail/go-crypto v1.1.0-alpha.0 // indirect + github.com/ProtonMail/go-crypto v1.1.3 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect - github.com/cloudflare/circl v1.3.7 // indirect + github.com/cloudflare/circl v1.5.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - golang.org/x/crypto v0.19.0 // indirect - golang.org/x/mod v0.15.0 // indirect - golang.org/x/sys v0.17.0 // indirect - golang.org/x/text v0.14.0 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect + golang.org/x/crypto v0.31.0 // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/sys v0.28.0 // indirect + golang.org/x/text v0.21.0 // indirect ) diff --git a/bundle/internal/tf/codegen/go.sum b/bundle/internal/tf/codegen/go.sum index 7a4023ba5..1ce56777f 100644 --- a/bundle/internal/tf/codegen/go.sum +++ b/bundle/internal/tf/codegen/go.sum @@ -2,67 +2,79 @@ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= github.com/Microsoft/go-winio v0.6.1 
h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0 h1:nHGfwXmFvJrSR9xu8qL7BkO4DqTHXE9N5vPhgY2I+j0= -github.com/ProtonMail/go-crypto v1.1.0-alpha.0/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= +github.com/ProtonMail/go-crypto v1.1.3 h1:nRBOetoydLeUb4nHajyO2bKqMLfWQ/ZPwkXqXxPxCFk= +github.com/ProtonMail/go-crypto v1.1.3/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= -github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= -github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= +github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= +github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= -github.com/go-git/go-git/v5 v5.11.0 h1:XIZc1p+8YzypNr34itUfSvYJcv+eYdTnTvOZ2vD3cA4= -github.com/go-git/go-git/v5 v5.11.0/go.mod h1:6GFcX2P3NM7FPBfpePbpLd21XxsgdAt+lKqXmCUiUCY= +github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= +github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hc-install v0.6.3 h1:yE/r1yJvWbtrJ0STwScgEnCanb0U9v7zp0Gbkmcoxqs= -github.com/hashicorp/hc-install v0.6.3/go.mod h1:KamGdbodYzlufbWh4r9NRo8y6GLHWZP2GBtdnms1Ln0= -github.com/hashicorp/terraform-exec v0.20.0 
h1:DIZnPsqzPGuUnq6cH8jWcPunBfY+C+M8JyYF3vpnuEo= -github.com/hashicorp/terraform-exec v0.20.0/go.mod h1:ckKGkJWbsNqFKV1itgMnE0hY9IYf1HoiekpuN0eWoDw= -github.com/hashicorp/terraform-json v0.21.0 h1:9NQxbLNqPbEMze+S6+YluEdXgJmhQykRyRNd+zTI05U= -github.com/hashicorp/terraform-json v0.21.0/go.mod h1:qdeBs11ovMzo5puhrRibdD6d2Dq6TyE/28JiU4tIQxk= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/hc-install v0.9.0 h1:2dIk8LcvANwtv3QZLckxcjyF5w8KVtiMxu6G6eLhghE= +github.com/hashicorp/hc-install v0.9.0/go.mod h1:+6vOP+mf3tuGgMApVYtmsnDoKWMDcFXeTxCACYZ8SFg= +github.com/hashicorp/terraform-exec v0.21.0 h1:uNkLAe95ey5Uux6KJdua6+cv8asgILFVWkd/RG0D2XQ= +github.com/hashicorp/terraform-exec v0.21.0/go.mod h1:1PPeMYou+KDUSSeRE9szMZ/oHf4fYUmB923Wzbq1ICg= +github.com/hashicorp/terraform-json v0.23.0 h1:sniCkExU4iKtTADReHzACkk8fnpQXrdD2xoR+lppBkI= +github.com/hashicorp/terraform-json v0.23.0/go.mod h1:MHdXbBAbSg0GvzuWazEGKAn/cyNfIB7mN6y7KJN6y2c= github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI= github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= +github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= +github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= -github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= -github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= -github.com/skeema/knownhosts v1.2.1 h1:SHWdIUa82uGZz+F+47k8SY4QhhI291cXCpopT1lK2AQ= -github.com/skeema/knownhosts v1.2.1/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= +github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= +github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= -github.com/zclconf/go-cty v1.14.2 
h1:kTG7lqmBou0Zkx35r6HJHUQTvaRPr5bIAf3AoHS0izI= -github.com/zclconf/go-cty v1.14.2/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/exp v0.0.0-20240213143201-ec583247a57a h1:HinSgX1tJRX3KsL//Gxynpw5CTOAIPhgL4W8PNiIpVE= -golang.org/x/exp v0.0.0-20240213143201-ec583247a57a/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= -golang.org/x/mod v0.15.0 h1:SernR4v+D55NyBH2QiEQrlBAnj1ECL6AGrA5+dPaMY8= -golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= -golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= -golang.org/x/sys v0.17.0 h1:25cE3gD+tdBA7lp7QfhuV+rJiE9YXTcS3VG1SqssI/Y= -golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ= -golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg= +github.com/zclconf/go-cty v1.15.1 h1:RgQYm4j2EvoBRXOPxhUvxPzRrGDo1eCOhHXuGfrj5S0= +github.com/zclconf/go-cty v1.15.1/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= +golang.org/x/exp v0.0.0-20241204233417-43b7b7cde48d h1:0olWaB5pg3+oychR51GUVCEsGkeCU/2JxjBgIo4f3M0= +golang.org/x/exp v0.0.0-20241204233417-43b7b7cde48d/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs= +golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= +golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8= +golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go index a778e0232..a3c5c6281 100644 --- a/bundle/internal/tf/codegen/schema/version.go +++ b/bundle/internal/tf/codegen/schema/version.go @@ -1,3 +1,3 @@ package schema -const ProviderVersion = "1.59.0" +const ProviderVersion = "1.61.0" diff --git a/bundle/internal/tf/codegen/templates/root.go.tmpl b/bundle/internal/tf/codegen/templates/root.go.tmpl index e03e978f0..b5c53c161 100644 --- a/bundle/internal/tf/codegen/templates/root.go.tmpl +++ b/bundle/internal/tf/codegen/templates/root.go.tmpl @@ -25,9 +25,9 @@ const 
ProviderVersion = "{{ .ProviderVersion }}" func NewRoot() *Root { return &Root{ - Terraform: map[string]interface{}{ - "required_providers": map[string]interface{}{ - "databricks": map[string]interface{}{ + Terraform: map[string]any{ + "required_providers": map[string]any{ + "databricks": map[string]any{ "source": ProviderSource, "version": ProviderVersion, }, diff --git a/bundle/internal/tf/schema/data_source_app.go b/bundle/internal/tf/schema/data_source_app.go new file mode 100644 index 000000000..9b4ef077e --- /dev/null +++ b/bundle/internal/tf/schema/data_source_app.go @@ -0,0 +1,107 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type DataSourceAppAppActiveDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type DataSourceAppAppActiveDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppAppActiveDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *DataSourceAppAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *DataSourceAppAppActiveDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type DataSourceAppAppAppStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppAppComputeStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppAppPendingDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type DataSourceAppAppPendingDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppAppPendingDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *DataSourceAppAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *DataSourceAppAppPendingDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type DataSourceAppAppResourcesJob struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type DataSourceAppAppResourcesSecret struct { + Key string `json:"key"` + Permission string `json:"permission"` + Scope string `json:"scope"` +} + +type DataSourceAppAppResourcesServingEndpoint struct { + Name string `json:"name"` + Permission string `json:"permission"` +} + +type DataSourceAppAppResourcesSqlWarehouse struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type DataSourceAppAppResources struct { + Description string `json:"description,omitempty"` + Job *DataSourceAppAppResourcesJob `json:"job,omitempty"` + Name string `json:"name"` + Secret *DataSourceAppAppResourcesSecret `json:"secret,omitempty"` + ServingEndpoint *DataSourceAppAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"` + SqlWarehouse *DataSourceAppAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"` +} + +type 
DataSourceAppApp struct { + ActiveDeployment *DataSourceAppAppActiveDeployment `json:"active_deployment,omitempty"` + AppStatus *DataSourceAppAppAppStatus `json:"app_status,omitempty"` + ComputeStatus *DataSourceAppAppComputeStatus `json:"compute_status,omitempty"` + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DefaultSourceCodePath string `json:"default_source_code_path,omitempty"` + Description string `json:"description,omitempty"` + Name string `json:"name"` + PendingDeployment *DataSourceAppAppPendingDeployment `json:"pending_deployment,omitempty"` + Resources []DataSourceAppAppResources `json:"resources,omitempty"` + ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"` + ServicePrincipalId int `json:"service_principal_id,omitempty"` + ServicePrincipalName string `json:"service_principal_name,omitempty"` + UpdateTime string `json:"update_time,omitempty"` + Updater string `json:"updater,omitempty"` + Url string `json:"url,omitempty"` +} + +type DataSourceApp struct { + App *DataSourceAppApp `json:"app,omitempty"` + Name string `json:"name"` +} diff --git a/bundle/internal/tf/schema/data_source_apps.go b/bundle/internal/tf/schema/data_source_apps.go new file mode 100644 index 000000000..dd381eabf --- /dev/null +++ b/bundle/internal/tf/schema/data_source_apps.go @@ -0,0 +1,106 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type DataSourceAppsAppActiveDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type DataSourceAppsAppActiveDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppsAppActiveDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *DataSourceAppsAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *DataSourceAppsAppActiveDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type DataSourceAppsAppAppStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppsAppComputeStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppsAppPendingDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type DataSourceAppsAppPendingDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type DataSourceAppsAppPendingDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *DataSourceAppsAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *DataSourceAppsAppPendingDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type DataSourceAppsAppResourcesJob struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type DataSourceAppsAppResourcesSecret struct { + Key 
string `json:"key"` + Permission string `json:"permission"` + Scope string `json:"scope"` +} + +type DataSourceAppsAppResourcesServingEndpoint struct { + Name string `json:"name"` + Permission string `json:"permission"` +} + +type DataSourceAppsAppResourcesSqlWarehouse struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type DataSourceAppsAppResources struct { + Description string `json:"description,omitempty"` + Job *DataSourceAppsAppResourcesJob `json:"job,omitempty"` + Name string `json:"name"` + Secret *DataSourceAppsAppResourcesSecret `json:"secret,omitempty"` + ServingEndpoint *DataSourceAppsAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"` + SqlWarehouse *DataSourceAppsAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"` +} + +type DataSourceAppsApp struct { + ActiveDeployment *DataSourceAppsAppActiveDeployment `json:"active_deployment,omitempty"` + AppStatus *DataSourceAppsAppAppStatus `json:"app_status,omitempty"` + ComputeStatus *DataSourceAppsAppComputeStatus `json:"compute_status,omitempty"` + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DefaultSourceCodePath string `json:"default_source_code_path,omitempty"` + Description string `json:"description,omitempty"` + Name string `json:"name"` + PendingDeployment *DataSourceAppsAppPendingDeployment `json:"pending_deployment,omitempty"` + Resources []DataSourceAppsAppResources `json:"resources,omitempty"` + ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"` + ServicePrincipalId int `json:"service_principal_id,omitempty"` + ServicePrincipalName string `json:"service_principal_name,omitempty"` + UpdateTime string `json:"update_time,omitempty"` + Updater string `json:"updater,omitempty"` + Url string `json:"url,omitempty"` +} + +type DataSourceApps struct { + App []DataSourceAppsApp `json:"app,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_functions.go b/bundle/internal/tf/schema/data_source_functions.go index 6085d7522..416db8fc8 100644 --- a/bundle/internal/tf/schema/data_source_functions.go +++ b/bundle/internal/tf/schema/data_source_functions.go @@ -69,6 +69,7 @@ type DataSourceFunctionsFunctions struct { FullDataType string `json:"full_data_type,omitempty"` FullName string `json:"full_name,omitempty"` FunctionId string `json:"function_id,omitempty"` + InputParams []DataSourceFunctionsFunctionsInputParams `json:"input_params,omitempty"` IsDeterministic bool `json:"is_deterministic,omitempty"` IsNullCall bool `json:"is_null_call,omitempty"` MetastoreId string `json:"metastore_id,omitempty"` @@ -76,8 +77,10 @@ type DataSourceFunctionsFunctions struct { Owner string `json:"owner,omitempty"` ParameterStyle string `json:"parameter_style,omitempty"` Properties string `json:"properties,omitempty"` + ReturnParams []DataSourceFunctionsFunctionsReturnParams `json:"return_params,omitempty"` RoutineBody string `json:"routine_body,omitempty"` RoutineDefinition string `json:"routine_definition,omitempty"` + RoutineDependencies []DataSourceFunctionsFunctionsRoutineDependencies `json:"routine_dependencies,omitempty"` SchemaName string `json:"schema_name,omitempty"` SecurityType string `json:"security_type,omitempty"` SpecificName string `json:"specific_name,omitempty"` @@ -85,14 +88,11 @@ type DataSourceFunctionsFunctions struct { SqlPath string `json:"sql_path,omitempty"` UpdatedAt int `json:"updated_at,omitempty"` UpdatedBy string `json:"updated_by,omitempty"` - InputParams 
[]DataSourceFunctionsFunctionsInputParams `json:"input_params,omitempty"` - ReturnParams []DataSourceFunctionsFunctionsReturnParams `json:"return_params,omitempty"` - RoutineDependencies []DataSourceFunctionsFunctionsRoutineDependencies `json:"routine_dependencies,omitempty"` } type DataSourceFunctions struct { CatalogName string `json:"catalog_name"` + Functions []DataSourceFunctionsFunctions `json:"functions,omitempty"` IncludeBrowse bool `json:"include_browse,omitempty"` SchemaName string `json:"schema_name"` - Functions []DataSourceFunctionsFunctions `json:"functions,omitempty"` } diff --git a/bundle/internal/tf/schema/data_source_jobs.go b/bundle/internal/tf/schema/data_source_jobs.go index 98533c0c8..643f7a9f9 100644 --- a/bundle/internal/tf/schema/data_source_jobs.go +++ b/bundle/internal/tf/schema/data_source_jobs.go @@ -3,6 +3,7 @@ package schema type DataSourceJobs struct { - Id string `json:"id,omitempty"` - Ids map[string]string `json:"ids,omitempty"` + Id string `json:"id,omitempty"` + Ids map[string]string `json:"ids,omitempty"` + JobNameContains string `json:"job_name_contains,omitempty"` } diff --git a/bundle/internal/tf/schema/data_source_notification_destinations.go b/bundle/internal/tf/schema/data_source_notification_destinations.go index c95ad6db9..8447b497b 100644 --- a/bundle/internal/tf/schema/data_source_notification_destinations.go +++ b/bundle/internal/tf/schema/data_source_notification_destinations.go @@ -10,6 +10,6 @@ type DataSourceNotificationDestinationsNotificationDestinations struct { type DataSourceNotificationDestinations struct { DisplayNameContains string `json:"display_name_contains,omitempty"` - Type string `json:"type,omitempty"` NotificationDestinations []DataSourceNotificationDestinationsNotificationDestinations `json:"notification_destinations,omitempty"` + Type string `json:"type,omitempty"` } diff --git a/bundle/internal/tf/schema/data_source_registered_model.go b/bundle/internal/tf/schema/data_source_registered_model.go index e19e0849a..41d69ff8f 100644 --- a/bundle/internal/tf/schema/data_source_registered_model.go +++ b/bundle/internal/tf/schema/data_source_registered_model.go @@ -8,6 +8,7 @@ type DataSourceRegisteredModelModelInfoAliases struct { } type DataSourceRegisteredModelModelInfo struct { + Aliases []DataSourceRegisteredModelModelInfoAliases `json:"aliases,omitempty"` BrowseOnly bool `json:"browse_only,omitempty"` CatalogName string `json:"catalog_name,omitempty"` Comment string `json:"comment,omitempty"` @@ -21,7 +22,6 @@ type DataSourceRegisteredModelModelInfo struct { StorageLocation string `json:"storage_location,omitempty"` UpdatedAt int `json:"updated_at,omitempty"` UpdatedBy string `json:"updated_by,omitempty"` - Aliases []DataSourceRegisteredModelModelInfoAliases `json:"aliases,omitempty"` } type DataSourceRegisteredModel struct { diff --git a/bundle/internal/tf/schema/data_source_registered_model_versions.go b/bundle/internal/tf/schema/data_source_registered_model_versions.go index f70e58f85..1a670dfbc 100644 --- a/bundle/internal/tf/schema/data_source_registered_model_versions.go +++ b/bundle/internal/tf/schema/data_source_registered_model_versions.go @@ -25,6 +25,7 @@ type DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies stru } type DataSourceRegisteredModelVersionsModelVersions struct { + Aliases []DataSourceRegisteredModelVersionsModelVersionsAliases `json:"aliases,omitempty"` BrowseOnly bool `json:"browse_only,omitempty"` CatalogName string `json:"catalog_name,omitempty"` Comment string 
`json:"comment,omitempty"` @@ -33,6 +34,7 @@ type DataSourceRegisteredModelVersionsModelVersions struct { Id string `json:"id,omitempty"` MetastoreId string `json:"metastore_id,omitempty"` ModelName string `json:"model_name,omitempty"` + ModelVersionDependencies []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies `json:"model_version_dependencies,omitempty"` RunId string `json:"run_id,omitempty"` RunWorkspaceId int `json:"run_workspace_id,omitempty"` SchemaName string `json:"schema_name,omitempty"` @@ -42,8 +44,6 @@ type DataSourceRegisteredModelVersionsModelVersions struct { UpdatedAt int `json:"updated_at,omitempty"` UpdatedBy string `json:"updated_by,omitempty"` Version int `json:"version,omitempty"` - Aliases []DataSourceRegisteredModelVersionsModelVersionsAliases `json:"aliases,omitempty"` - ModelVersionDependencies []DataSourceRegisteredModelVersionsModelVersionsModelVersionDependencies `json:"model_version_dependencies,omitempty"` } type DataSourceRegisteredModelVersions struct { diff --git a/bundle/internal/tf/schema/data_source_serving_endpoints.go b/bundle/internal/tf/schema/data_source_serving_endpoints.go index 028121b5a..bdfd778e0 100644 --- a/bundle/internal/tf/schema/data_source_serving_endpoints.go +++ b/bundle/internal/tf/schema/data_source_serving_endpoints.go @@ -8,9 +8,9 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii struct { type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInput struct { InvalidKeywords []string `json:"invalid_keywords,omitempty"` + Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii `json:"pii,omitempty"` Safety bool `json:"safety,omitempty"` ValidTopics []string `json:"valid_topics,omitempty"` - Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsInputPii `json:"pii,omitempty"` } type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct { @@ -19,9 +19,9 @@ type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii struct { type DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutput struct { InvalidKeywords []string `json:"invalid_keywords,omitempty"` + Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii `json:"pii,omitempty"` Safety bool `json:"safety,omitempty"` ValidTopics []string `json:"valid_topics,omitempty"` - Pii []DataSourceServingEndpointsEndpointsAiGatewayGuardrailsOutputPii `json:"pii,omitempty"` } type DataSourceServingEndpointsEndpointsAiGatewayGuardrails struct { @@ -111,17 +111,17 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmCon } type DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel struct { - Name string `json:"name"` - Provider string `json:"provider"` - Task string `json:"task"` Ai21LabsConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAi21LabsConfig `json:"ai21labs_config,omitempty"` AmazonBedrockConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAmazonBedrockConfig `json:"amazon_bedrock_config,omitempty"` AnthropicConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelAnthropicConfig `json:"anthropic_config,omitempty"` CohereConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelCohereConfig `json:"cohere_config,omitempty"` DatabricksModelServingConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelDatabricksModelServingConfig `json:"databricks_model_serving_config,omitempty"` GoogleCloudVertexAiConfig 
[]DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelGoogleCloudVertexAiConfig `json:"google_cloud_vertex_ai_config,omitempty"` + Name string `json:"name"` OpenaiConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelOpenaiConfig `json:"openai_config,omitempty"` PalmConfig []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModelPalmConfig `json:"palm_config,omitempty"` + Provider string `json:"provider"` + Task string `json:"task"` } type DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel struct { @@ -134,9 +134,9 @@ type DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel stru type DataSourceServingEndpointsEndpointsConfigServedEntities struct { EntityName string `json:"entity_name,omitempty"` EntityVersion string `json:"entity_version,omitempty"` - Name string `json:"name,omitempty"` ExternalModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesExternalModel `json:"external_model,omitempty"` FoundationModel []DataSourceServingEndpointsEndpointsConfigServedEntitiesFoundationModel `json:"foundation_model,omitempty"` + Name string `json:"name,omitempty"` } type DataSourceServingEndpointsEndpointsConfigServedModels struct { @@ -161,16 +161,16 @@ type DataSourceServingEndpointsEndpointsTags struct { } type DataSourceServingEndpointsEndpoints struct { + AiGateway []DataSourceServingEndpointsEndpointsAiGateway `json:"ai_gateway,omitempty"` + Config []DataSourceServingEndpointsEndpointsConfig `json:"config,omitempty"` CreationTimestamp int `json:"creation_timestamp,omitempty"` Creator string `json:"creator,omitempty"` Id string `json:"id,omitempty"` LastUpdatedTimestamp int `json:"last_updated_timestamp,omitempty"` Name string `json:"name,omitempty"` - Task string `json:"task,omitempty"` - AiGateway []DataSourceServingEndpointsEndpointsAiGateway `json:"ai_gateway,omitempty"` - Config []DataSourceServingEndpointsEndpointsConfig `json:"config,omitempty"` State []DataSourceServingEndpointsEndpointsState `json:"state,omitempty"` Tags []DataSourceServingEndpointsEndpointsTags `json:"tags,omitempty"` + Task string `json:"task,omitempty"` } type DataSourceServingEndpoints struct { diff --git a/bundle/internal/tf/schema/data_sources.go b/bundle/internal/tf/schema/data_sources.go index 3a59bf8c3..1880db25a 100644 --- a/bundle/internal/tf/schema/data_sources.go +++ b/bundle/internal/tf/schema/data_sources.go @@ -3,6 +3,8 @@ package schema type DataSources struct { + App map[string]any `json:"databricks_app,omitempty"` + Apps map[string]any `json:"databricks_apps,omitempty"` AwsAssumeRolePolicy map[string]any `json:"databricks_aws_assume_role_policy,omitempty"` AwsBucketPolicy map[string]any `json:"databricks_aws_bucket_policy,omitempty"` AwsCrossaccountPolicy map[string]any `json:"databricks_aws_crossaccount_policy,omitempty"` @@ -66,6 +68,8 @@ type DataSources struct { func NewDataSources() *DataSources { return &DataSources{ + App: make(map[string]any), + Apps: make(map[string]any), AwsAssumeRolePolicy: make(map[string]any), AwsBucketPolicy: make(map[string]any), AwsCrossaccountPolicy: make(map[string]any), diff --git a/bundle/internal/tf/schema/resource_app.go b/bundle/internal/tf/schema/resource_app.go new file mode 100644 index 000000000..14c93b793 --- /dev/null +++ b/bundle/internal/tf/schema/resource_app.go @@ -0,0 +1,102 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type ResourceAppActiveDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type ResourceAppActiveDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type ResourceAppActiveDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *ResourceAppActiveDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *ResourceAppActiveDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type ResourceAppAppStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type ResourceAppComputeStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type ResourceAppPendingDeploymentDeploymentArtifacts struct { + SourceCodePath string `json:"source_code_path,omitempty"` +} + +type ResourceAppPendingDeploymentStatus struct { + Message string `json:"message,omitempty"` + State string `json:"state,omitempty"` +} + +type ResourceAppPendingDeployment struct { + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DeploymentArtifacts *ResourceAppPendingDeploymentDeploymentArtifacts `json:"deployment_artifacts,omitempty"` + DeploymentId string `json:"deployment_id,omitempty"` + Mode string `json:"mode,omitempty"` + SourceCodePath string `json:"source_code_path,omitempty"` + Status *ResourceAppPendingDeploymentStatus `json:"status,omitempty"` + UpdateTime string `json:"update_time,omitempty"` +} + +type ResourceAppResourcesJob struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type ResourceAppResourcesSecret struct { + Key string `json:"key"` + Permission string `json:"permission"` + Scope string `json:"scope"` +} + +type ResourceAppResourcesServingEndpoint struct { + Name string `json:"name"` + Permission string `json:"permission"` +} + +type ResourceAppResourcesSqlWarehouse struct { + Id string `json:"id"` + Permission string `json:"permission"` +} + +type ResourceAppResources struct { + Description string `json:"description,omitempty"` + Job *ResourceAppResourcesJob `json:"job,omitempty"` + Name string `json:"name"` + Secret *ResourceAppResourcesSecret `json:"secret,omitempty"` + ServingEndpoint *ResourceAppResourcesServingEndpoint `json:"serving_endpoint,omitempty"` + SqlWarehouse *ResourceAppResourcesSqlWarehouse `json:"sql_warehouse,omitempty"` +} + +type ResourceApp struct { + ActiveDeployment *ResourceAppActiveDeployment `json:"active_deployment,omitempty"` + AppStatus *ResourceAppAppStatus `json:"app_status,omitempty"` + ComputeStatus *ResourceAppComputeStatus `json:"compute_status,omitempty"` + CreateTime string `json:"create_time,omitempty"` + Creator string `json:"creator,omitempty"` + DefaultSourceCodePath string `json:"default_source_code_path,omitempty"` + Description string `json:"description,omitempty"` + Name string `json:"name"` + PendingDeployment *ResourceAppPendingDeployment `json:"pending_deployment,omitempty"` + Resources []ResourceAppResources `json:"resources,omitempty"` + ServicePrincipalClientId string `json:"service_principal_client_id,omitempty"` + ServicePrincipalId int 
`json:"service_principal_id,omitempty"` + ServicePrincipalName string `json:"service_principal_name,omitempty"` + UpdateTime string `json:"update_time,omitempty"` + Updater string `json:"updater,omitempty"` + Url string `json:"url,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_azure_adls_gen2_mount.go b/bundle/internal/tf/schema/resource_azure_adls_gen2_mount.go index d0f96d54e..6e2ea08e8 100644 --- a/bundle/internal/tf/schema/resource_azure_adls_gen2_mount.go +++ b/bundle/internal/tf/schema/resource_azure_adls_gen2_mount.go @@ -9,6 +9,7 @@ type ResourceAzureAdlsGen2Mount struct { ClusterId string `json:"cluster_id,omitempty"` ContainerName string `json:"container_name"` Directory string `json:"directory,omitempty"` + Environment string `json:"environment,omitempty"` Id string `json:"id,omitempty"` InitializeFileSystem bool `json:"initialize_file_system"` MountName string `json:"mount_name"` diff --git a/bundle/internal/tf/schema/resource_credential.go b/bundle/internal/tf/schema/resource_credential.go new file mode 100644 index 000000000..9d47219ea --- /dev/null +++ b/bundle/internal/tf/schema/resource_credential.go @@ -0,0 +1,52 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type ResourceCredentialAwsIamRole struct { + ExternalId string `json:"external_id,omitempty"` + RoleArn string `json:"role_arn,omitempty"` + UnityCatalogIamArn string `json:"unity_catalog_iam_arn,omitempty"` +} + +type ResourceCredentialAzureManagedIdentity struct { + AccessConnectorId string `json:"access_connector_id"` + CredentialId string `json:"credential_id,omitempty"` + ManagedIdentityId string `json:"managed_identity_id,omitempty"` +} + +type ResourceCredentialAzureServicePrincipal struct { + ApplicationId string `json:"application_id"` + ClientSecret string `json:"client_secret"` + DirectoryId string `json:"directory_id"` +} + +type ResourceCredentialDatabricksGcpServiceAccount struct { + CredentialId string `json:"credential_id,omitempty"` + Email string `json:"email,omitempty"` + PrivateKeyId string `json:"private_key_id,omitempty"` +} + +type ResourceCredential struct { + Comment string `json:"comment,omitempty"` + CreatedAt int `json:"created_at,omitempty"` + CreatedBy string `json:"created_by,omitempty"` + CredentialId string `json:"credential_id,omitempty"` + ForceDestroy bool `json:"force_destroy,omitempty"` + ForceUpdate bool `json:"force_update,omitempty"` + FullName string `json:"full_name,omitempty"` + Id string `json:"id,omitempty"` + IsolationMode string `json:"isolation_mode,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name"` + Owner string `json:"owner,omitempty"` + Purpose string `json:"purpose"` + ReadOnly bool `json:"read_only,omitempty"` + SkipValidation bool `json:"skip_validation,omitempty"` + UpdatedAt int `json:"updated_at,omitempty"` + UpdatedBy string `json:"updated_by,omitempty"` + UsedForManagedStorage bool `json:"used_for_managed_storage,omitempty"` + AwsIamRole *ResourceCredentialAwsIamRole `json:"aws_iam_role,omitempty"` + AzureManagedIdentity *ResourceCredentialAzureManagedIdentity `json:"azure_managed_identity,omitempty"` + AzureServicePrincipal *ResourceCredentialAzureServicePrincipal `json:"azure_service_principal,omitempty"` + DatabricksGcpServiceAccount *ResourceCredentialDatabricksGcpServiceAccount `json:"databricks_gcp_service_account,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_grant.go b/bundle/internal/tf/schema/resource_grant.go index 
d8569f304..6ed97791c 100644 --- a/bundle/internal/tf/schema/resource_grant.go +++ b/bundle/internal/tf/schema/resource_grant.go @@ -4,6 +4,7 @@ package schema type ResourceGrant struct { Catalog string `json:"catalog,omitempty"` + Credential string `json:"credential,omitempty"` ExternalLocation string `json:"external_location,omitempty"` ForeignConnection string `json:"foreign_connection,omitempty"` Function string `json:"function,omitempty"` diff --git a/bundle/internal/tf/schema/resource_grants.go b/bundle/internal/tf/schema/resource_grants.go index dd00152fb..474a9950f 100644 --- a/bundle/internal/tf/schema/resource_grants.go +++ b/bundle/internal/tf/schema/resource_grants.go @@ -9,6 +9,7 @@ type ResourceGrantsGrant struct { type ResourceGrants struct { Catalog string `json:"catalog,omitempty"` + Credential string `json:"credential,omitempty"` ExternalLocation string `json:"external_location,omitempty"` ForeignConnection string `json:"foreign_connection,omitempty"` Function string `json:"function,omitempty"` diff --git a/bundle/internal/tf/schema/resource_permissions.go b/bundle/internal/tf/schema/resource_permissions.go index a3d05e6f2..7dfb84b5f 100644 --- a/bundle/internal/tf/schema/resource_permissions.go +++ b/bundle/internal/tf/schema/resource_permissions.go @@ -10,6 +10,7 @@ type ResourcePermissionsAccessControl struct { } type ResourcePermissions struct { + AppName string `json:"app_name,omitempty"` Authorization string `json:"authorization,omitempty"` ClusterId string `json:"cluster_id,omitempty"` ClusterPolicyId string `json:"cluster_policy_id,omitempty"` diff --git a/bundle/internal/tf/schema/resources.go b/bundle/internal/tf/schema/resources.go index ea5b618fd..b57c2711a 100644 --- a/bundle/internal/tf/schema/resources.go +++ b/bundle/internal/tf/schema/resources.go @@ -5,6 +5,7 @@ package schema type Resources struct { AccessControlRuleSet map[string]any `json:"databricks_access_control_rule_set,omitempty"` Alert map[string]any `json:"databricks_alert,omitempty"` + App map[string]any `json:"databricks_app,omitempty"` ArtifactAllowlist map[string]any `json:"databricks_artifact_allowlist,omitempty"` AutomaticClusterUpdateWorkspaceSetting map[string]any `json:"databricks_automatic_cluster_update_workspace_setting,omitempty"` AwsS3Mount map[string]any `json:"databricks_aws_s3_mount,omitempty"` @@ -18,6 +19,7 @@ type Resources struct { ClusterPolicy map[string]any `json:"databricks_cluster_policy,omitempty"` ComplianceSecurityProfileWorkspaceSetting map[string]any `json:"databricks_compliance_security_profile_workspace_setting,omitempty"` Connection map[string]any `json:"databricks_connection,omitempty"` + Credential map[string]any `json:"databricks_credential,omitempty"` CustomAppIntegration map[string]any `json:"databricks_custom_app_integration,omitempty"` Dashboard map[string]any `json:"databricks_dashboard,omitempty"` DbfsFile map[string]any `json:"databricks_dbfs_file,omitempty"` @@ -111,6 +113,7 @@ func NewResources() *Resources { return &Resources{ AccessControlRuleSet: make(map[string]any), Alert: make(map[string]any), + App: make(map[string]any), ArtifactAllowlist: make(map[string]any), AutomaticClusterUpdateWorkspaceSetting: make(map[string]any), AwsS3Mount: make(map[string]any), @@ -124,6 +127,7 @@ func NewResources() *Resources { ClusterPolicy: make(map[string]any), ComplianceSecurityProfileWorkspaceSetting: make(map[string]any), Connection: make(map[string]any), + Credential: make(map[string]any), CustomAppIntegration: make(map[string]any), Dashboard: 
make(map[string]any), DbfsFile: make(map[string]any), diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 2cadb8090..6befba596 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -21,7 +21,7 @@ type Root struct { const ProviderHost = "registry.terraform.io" const ProviderSource = "databricks/databricks" -const ProviderVersion = "1.59.0" +const ProviderVersion = "1.61.0" func NewRoot() *Root { return &Root{ diff --git a/bundle/libraries/expand_glob_references.go b/bundle/libraries/expand_glob_references.go index c71615e0e..bb1905045 100644 --- a/bundle/libraries/expand_glob_references.go +++ b/bundle/libraries/expand_glob_references.go @@ -11,8 +11,7 @@ import ( "github.com/databricks/cli/libs/dyn" ) -type expand struct { -} +type expand struct{} func matchError(p dyn.Path, l []dyn.Location, message string) diag.Diagnostic { return diag.Diagnostic{ @@ -189,7 +188,6 @@ func (e *expand) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { diags = diags.Extend(d) return dyn.V(output), nil }) - if err != nil { return dyn.InvalidValue, err } @@ -197,7 +195,6 @@ func (e *expand) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { return v, nil }) - if err != nil { diags = diags.Extend(diag.FromErr(err)) } diff --git a/bundle/libraries/filer_volume_test.go b/bundle/libraries/filer_volume_test.go index 0d886824d..7b2f5c5ba 100644 --- a/bundle/libraries/filer_volume_test.go +++ b/bundle/libraries/filer_volume_test.go @@ -110,7 +110,8 @@ func TestFilerForVolumeForErrorFromAPI(t *testing.T) { Summary: "unable to determine if volume at /Volumes/main/my_schema/my_volume exists: error from API", Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, - }}, diags) + }, + }, diags) } func TestFilerForVolumeWithVolumeNotFound(t *testing.T) { @@ -136,7 +137,8 @@ func TestFilerForVolumeWithVolumeNotFound(t *testing.T) { Summary: "volume /Volumes/main/my_schema/doesnotexist does not exist: some error message", Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path")}, - }}, diags) + }, + }, diags) } func TestFilerForVolumeNotFoundAndInBundle(t *testing.T) { @@ -173,7 +175,7 @@ func TestFilerForVolumeNotFoundAndInBundle(t *testing.T) { { Severity: diag.Error, Summary: "volume /Volumes/main/my_schema/my_volume does not exist: error from API", - Locations: []dyn.Location{{"config.yml", 1, 2}, {"volume.yml", 1, 2}}, + Locations: []dyn.Location{{File: "config.yml", Line: 1, Column: 2}, {File: "volume.yml", Line: 1, Column: 2}}, Paths: []dyn.Path{dyn.MustPathFromString("workspace.artifact_path"), dyn.MustPathFromString("resources.volumes.foo")}, Detail: `You are using a volume in your artifact_path that is managed by this bundle but which has not been deployed yet. 
Please first deploy diff --git a/bundle/libraries/upload.go b/bundle/libraries/upload.go index 4b6f43701..a2162fb7b 100644 --- a/bundle/libraries/upload.go +++ b/bundle/libraries/upload.go @@ -81,7 +81,6 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error return v, nil }) }) - if err != nil { return nil, err } @@ -119,7 +118,6 @@ func collectLocalLibraries(b *bundle.Bundle) (map[string][]configLocation, error return v, nil }) }) - if err != nil { return nil, err } @@ -175,7 +173,6 @@ func (u *upload) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics { return v, nil }) - if err != nil { diags = diags.Extend(diag.FromErr(err)) } diff --git a/bundle/permissions/filter.go b/bundle/permissions/filter.go index 60264f6ea..6fa8d1374 100644 --- a/bundle/permissions/filter.go +++ b/bundle/permissions/filter.go @@ -56,7 +56,6 @@ func filter(currentUser string) dyn.WalkValueFunc { } return v, nil - } } diff --git a/bundle/permissions/filter_test.go b/bundle/permissions/filter_test.go index 121ce10dc..e6e5a3799 100644 --- a/bundle/permissions/filter_test.go +++ b/bundle/permissions/filter_test.go @@ -90,7 +90,6 @@ func testFixture(userName string) *bundle.Bundle { }, }, } - } func TestFilterCurrentUser(t *testing.T) { diff --git a/bundle/permissions/mutator.go b/bundle/permissions/mutator.go index bc1392d93..cd7cbf40c 100644 --- a/bundle/permissions/mutator.go +++ b/bundle/permissions/mutator.go @@ -7,43 +7,52 @@ import ( "strings" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/libs/diag" + "github.com/databricks/cli/libs/dyn" + "github.com/databricks/cli/libs/dyn/convert" ) -const CAN_MANAGE = "CAN_MANAGE" -const CAN_VIEW = "CAN_VIEW" -const CAN_RUN = "CAN_RUN" +const ( + CAN_MANAGE = "CAN_MANAGE" + CAN_VIEW = "CAN_VIEW" + CAN_RUN = "CAN_RUN" +) -var allowedLevels = []string{CAN_MANAGE, CAN_VIEW, CAN_RUN} -var levelsMap = map[string](map[string]string){ - "jobs": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_VIEW", - CAN_RUN: "CAN_MANAGE_RUN", - }, - "pipelines": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_VIEW", - CAN_RUN: "CAN_RUN", - }, - "mlflow_experiments": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_READ", - }, - "mlflow_models": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_READ", - }, - "model_serving_endpoints": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_VIEW", - CAN_RUN: "CAN_QUERY", - }, - "dashboards": { - CAN_MANAGE: "CAN_MANAGE", - CAN_VIEW: "CAN_READ", - }, -} +var unsupportedResources = []string{"clusters", "volumes", "schemas", "quality_monitors", "registered_models"} + +var ( + allowedLevels = []string{CAN_MANAGE, CAN_VIEW, CAN_RUN} + levelsMap = map[string](map[string]string){ + "jobs": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_VIEW", + CAN_RUN: "CAN_MANAGE_RUN", + }, + "pipelines": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_VIEW", + CAN_RUN: "CAN_RUN", + }, + "experiments": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_READ", + }, + "models": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_READ", + }, + "model_serving_endpoints": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_VIEW", + CAN_RUN: "CAN_QUERY", + }, + "dashboards": { + CAN_MANAGE: "CAN_MANAGE", + CAN_VIEW: "CAN_READ", + }, + } +) type bundlePermissions struct{} @@ -57,11 +66,55 @@ func (m *bundlePermissions) Apply(ctx context.Context, b *bundle.Bundle) diag.Di return diag.FromErr(err) } - applyForJobs(ctx, b) - applyForPipelines(ctx, b) - applyForMlModels(ctx, b) - 
applyForMlExperiments(ctx, b) - applyForModelServiceEndpoints(ctx, b) + patterns := make(map[string]dyn.Pattern, 0) + for key := range levelsMap { + patterns[key] = dyn.NewPattern( + dyn.Key("resources"), + dyn.Key(key), + dyn.AnyKey(), + ) + } + + err = b.Config.Mutate(func(v dyn.Value) (dyn.Value, error) { + for key, pattern := range patterns { + v, err = dyn.MapByPattern(v, pattern, func(p dyn.Path, v dyn.Value) (dyn.Value, error) { + var permissions []resources.Permission + pv, err := dyn.Get(v, "permissions") + // If the permissions field is not found, we set to an empty array + if err != nil { + pv = dyn.V([]dyn.Value{}) + } + + err = convert.ToTyped(&permissions, pv) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to convert permissions: %w", err) + } + + permissions = append(permissions, convertPermissions( + ctx, + b.Config.Permissions, + permissions, + key, + levelsMap[key], + )...) + + pv, err = convert.FromTyped(permissions, dyn.NilValue) + if err != nil { + return dyn.InvalidValue, fmt.Errorf("failed to convert permissions: %w", err) + } + + return dyn.Set(v, "permissions", pv) + }) + if err != nil { + return dyn.InvalidValue, err + } + } + + return v, nil + }) + if err != nil { + return diag.FromErr(err) + } return nil } @@ -76,66 +129,6 @@ func validate(b *bundle.Bundle) error { return nil } -func applyForJobs(ctx context.Context, b *bundle.Bundle) { - for key, job := range b.Config.Resources.Jobs { - job.Permissions = append(job.Permissions, convert( - ctx, - b.Config.Permissions, - job.Permissions, - key, - levelsMap["jobs"], - )...) - } -} - -func applyForPipelines(ctx context.Context, b *bundle.Bundle) { - for key, pipeline := range b.Config.Resources.Pipelines { - pipeline.Permissions = append(pipeline.Permissions, convert( - ctx, - b.Config.Permissions, - pipeline.Permissions, - key, - levelsMap["pipelines"], - )...) - } -} - -func applyForMlExperiments(ctx context.Context, b *bundle.Bundle) { - for key, experiment := range b.Config.Resources.Experiments { - experiment.Permissions = append(experiment.Permissions, convert( - ctx, - b.Config.Permissions, - experiment.Permissions, - key, - levelsMap["mlflow_experiments"], - )...) - } -} - -func applyForMlModels(ctx context.Context, b *bundle.Bundle) { - for key, model := range b.Config.Resources.Models { - model.Permissions = append(model.Permissions, convert( - ctx, - b.Config.Permissions, - model.Permissions, - key, - levelsMap["mlflow_models"], - )...) - } -} - -func applyForModelServiceEndpoints(ctx context.Context, b *bundle.Bundle) { - for key, model := range b.Config.Resources.ModelServingEndpoints { - model.Permissions = append(model.Permissions, convert( - ctx, - b.Config.Permissions, - model.Permissions, - key, - levelsMap["model_serving_endpoints"], - )...) 
- } -} - func (m *bundlePermissions) Name() string { return "ApplyBundlePermissions" } diff --git a/bundle/permissions/mutator_test.go b/bundle/permissions/mutator_test.go index 1a177d902..78703e90f 100644 --- a/bundle/permissions/mutator_test.go +++ b/bundle/permissions/mutator_test.go @@ -2,12 +2,15 @@ package permissions import ( "context" + "fmt" + "slices" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -51,6 +54,10 @@ func TestApplyBundlePermissions(t *testing.T) { "endpoint_1": {}, "endpoint_2": {}, }, + Dashboards: map[string]*resources.Dashboard{ + "dashboard_1": {}, + "dashboard_2": {}, + }, }, }, } @@ -103,6 +110,10 @@ func TestApplyBundlePermissions(t *testing.T) { require.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint_2"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"}) require.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint_2"].Permissions, resources.Permission{Level: "CAN_VIEW", GroupName: "TestGroup"}) require.Contains(t, b.Config.Resources.ModelServingEndpoints["endpoint_2"].Permissions, resources.Permission{Level: "CAN_QUERY", ServicePrincipalName: "TestServicePrincipal"}) + + require.Len(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, 2) + require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"}) + require.Contains(t, b.Config.Resources.Dashboards["dashboard_1"].Permissions, resources.Permission{Level: "CAN_READ", GroupName: "TestGroup"}) } func TestWarningOnOverlapPermission(t *testing.T) { @@ -146,5 +157,20 @@ func TestWarningOnOverlapPermission(t *testing.T) { require.Contains(t, b.Config.Resources.Jobs["job_2"].Permissions, resources.Permission{Level: "CAN_VIEW", UserName: "TestUser2"}) require.Contains(t, b.Config.Resources.Jobs["job_2"].Permissions, resources.Permission{Level: "CAN_MANAGE", UserName: "TestUser"}) require.Contains(t, b.Config.Resources.Jobs["job_2"].Permissions, resources.Permission{Level: "CAN_VIEW", GroupName: "TestGroup"}) - +} + +func TestAllResourcesExplicitlyDefinedForPermissionsSupport(t *testing.T) { + r := config.Resources{} + + for _, resource := range unsupportedResources { + _, ok := levelsMap[resource] + assert.False(t, ok, fmt.Sprintf("Resource %s is defined in both levelsMap and unsupportedResources", resource)) + } + + for _, resource := range r.AllResources() { + _, ok := levelsMap[resource.Description.PluralName] + if !slices.Contains(unsupportedResources, resource.Description.PluralName) && !ok { + assert.Fail(t, fmt.Sprintf("Resource %s is not explicitly defined in levelsMap or unsupportedResources", resource.Description.PluralName)) + } + } } diff --git a/bundle/permissions/utils.go b/bundle/permissions/utils.go index 9072cd252..cf16ea9b2 100644 --- a/bundle/permissions/utils.go +++ b/bundle/permissions/utils.go @@ -7,7 +7,7 @@ import ( "github.com/databricks/cli/libs/diag" ) -func convert( +func convertPermissions( ctx context.Context, bundlePermissions []resources.Permission, resourcePermissions []resources.Permission, diff --git a/bundle/permissions/validate.go b/bundle/permissions/validate.go index f1a18f430..dee7326cf 100644 --- a/bundle/permissions/validate.go +++ b/bundle/permissions/validate.go @@ -9,8 +9,7 @@ import ( 
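
Aside on the permissions mutator rewrite above: five hand-written per-resource-type loops (applyForJobs, applyForPipelines, ...) collapse into a single dyn.MapByPattern walk keyed off levelsMap. The condensed sketch below uses only calls that appear in the hunk (dyn.V, dyn.NewPattern, dyn.Key, dyn.AnyKey, dyn.MapByPattern, dyn.Set); the tiny config tree and the seeding of an empty permissions list are illustrative, so treat it as a sketch rather than the mutator itself:

```go
package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// Miniature config tree: resources.jobs.my_job with no permissions yet.
	v := dyn.V(map[string]dyn.Value{
		"resources": dyn.V(map[string]dyn.Value{
			"jobs": dyn.V(map[string]dyn.Value{
				"my_job": dyn.V(map[string]dyn.Value{}),
			}),
		}),
	})

	// One pattern per resource kind replaces one helper function per kind.
	pattern := dyn.NewPattern(dyn.Key("resources"), dyn.Key("jobs"), dyn.AnyKey())

	v, err := dyn.MapByPattern(v, pattern, func(p dyn.Path, rv dyn.Value) (dyn.Value, error) {
		// Seed every matched resource with an empty permissions list.
		return dyn.Set(rv, "permissions", dyn.V([]dyn.Value{}))
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("%v\n", v)
}
```
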
"github.com/databricks/cli/libs/diag" ) -type validateSharedRootPermissions struct { -} +type validateSharedRootPermissions struct{} func ValidateSharedRootPermissions() bundle.Mutator { return &validateSharedRootPermissions{} diff --git a/bundle/permissions/workspace_path_permissions.go b/bundle/permissions/workspace_path_permissions.go index a3b4424c1..225d2499e 100644 --- a/bundle/permissions/workspace_path_permissions.go +++ b/bundle/permissions/workspace_path_permissions.go @@ -52,7 +52,7 @@ func (p WorkspacePathPermissions) Compare(perms []resources.Permission) diag.Dia } // containsAll checks if permA contains all permissions in permB. -func containsAll(permA []resources.Permission, permB []resources.Permission) (bool, []resources.Permission) { +func containsAll(permA, permB []resources.Permission) (bool, []resources.Permission) { missing := make([]resources.Permission, 0) for _, a := range permA { found := false diff --git a/bundle/permissions/workspace_path_permissions_test.go b/bundle/permissions/workspace_path_permissions_test.go index 0bb00474c..eaefad906 100644 --- a/bundle/permissions/workspace_path_permissions_test.go +++ b/bundle/permissions/workspace_path_permissions_test.go @@ -117,5 +117,4 @@ func TestWorkspacePathPermissionsCompare(t *testing.T) { diags := wp.Compare(tc.perms) require.Equal(t, tc.expected, diags) } - } diff --git a/bundle/permissions/workspace_root.go b/bundle/permissions/workspace_root.go index de4f3a7fe..4ac0d38a5 100644 --- a/bundle/permissions/workspace_root.go +++ b/bundle/permissions/workspace_root.go @@ -12,8 +12,7 @@ import ( "golang.org/x/sync/errgroup" ) -type workspaceRootPermissions struct { -} +type workspaceRootPermissions struct{} func ApplyWorkspaceRootPermissions() bundle.Mutator { return &workspaceRootPermissions{} diff --git a/bundle/phases/bind.go b/bundle/phases/bind.go index b2e92d6e2..c62c48aea 100644 --- a/bundle/phases/bind.go +++ b/bundle/phases/bind.go @@ -25,7 +25,7 @@ func Bind(opts *terraform.BindOptions) bundle.Mutator { ) } -func Unbind(resourceType string, resourceKey string) bundle.Mutator { +func Unbind(resourceType, resourceKey string) bundle.Mutator { return newPhase( "unbind", []bundle.Mutator{ diff --git a/bundle/render/render_text_output.go b/bundle/render/render_text_output.go index 92dacb448..bacb85735 100644 --- a/bundle/render/render_text_output.go +++ b/bundle/render/render_text_output.go @@ -110,7 +110,7 @@ func renderSummaryHeaderTemplate(out io.Writer, b *bundle.Bundle) error { return renderSummaryHeaderTemplate(out, &bundle.Bundle{}) } - var currentUser = &iam.User{} + currentUser := &iam.User{} if b.Config.Workspace.CurrentUser != nil { if b.Config.Workspace.CurrentUser.User != nil { @@ -171,10 +171,16 @@ func RenderDiagnostics(out io.Writer, b *bundle.Bundle, diags diag.Diagnostics, if err != nil { return fmt.Errorf("failed to render summary: %w", err) } - io.WriteString(out, "\n") + _, err = io.WriteString(out, "\n") + if err != nil { + return err + } } trailer := buildTrailer(diags) - io.WriteString(out, trailer) + _, err = io.WriteString(out, trailer) + if err != nil { + return err + } } return nil diff --git a/bundle/render/render_text_output_test.go b/bundle/render/render_text_output_test.go index 135d79dae..506756f70 100644 --- a/bundle/render/render_text_output_test.go +++ b/bundle/render/render_text_output_test.go @@ -376,7 +376,8 @@ func TestRenderDiagnostics(t *testing.T) { Locations: []dyn.Location{{ File: "foo.yaml", Line: 1, - Column: 2}}, + Column: 2, + }}, }, }, expected: "Error: failed 
to load xxx\n" + @@ -489,7 +490,8 @@ func TestRenderSummaryTemplate_nilBundle(t *testing.T) { err := renderSummaryHeaderTemplate(writer, nil) require.NoError(t, err) - io.WriteString(writer, buildTrailer(nil)) + _, err = io.WriteString(writer, buildTrailer(nil)) + require.NoError(t, err) assert.Equal(t, "Validation OK!\n", writer.String()) } diff --git a/bundle/root_test.go b/bundle/root_test.go index 99bf58a00..075242710 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -71,7 +71,7 @@ func TestRootLookup(t *testing.T) { defer f.Close() // Create directory tree. - err = os.MkdirAll("./a/b/c", 0755) + err = os.MkdirAll("./a/b/c", 0o755) require.NoError(t, err) // It should find the project root from $PWD. diff --git a/bundle/run/job.go b/bundle/run/job.go index 340af961c..b43db9184 100644 --- a/bundle/run/job.go +++ b/bundle/run/job.go @@ -143,7 +143,7 @@ func logProgressCallback(ctx context.Context, progressLogger *cmdio.Logger) func progressLogger.Log(event) // log progress events in using the default logger - log.Infof(ctx, event.String()) + log.Info(ctx, event.String()) } } @@ -203,7 +203,7 @@ func (r *jobRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, e logDebug(r) logProgress(r) }).GetWithTimeout(jobRunTimeout) - if err != nil && runId != nil { + if err != nil { r.logFailedTasks(ctx, *runId) } if err != nil { @@ -289,7 +289,6 @@ func (r *jobRunner) Cancel(ctx context.Context) error { ActiveOnly: true, JobId: jobID, }) - if err != nil { return err } diff --git a/bundle/run/job_args.go b/bundle/run/job_args.go index 85cf96efb..b1596bbb0 100644 --- a/bundle/run/job_args.go +++ b/bundle/run/job_args.go @@ -131,7 +131,7 @@ func (r *jobRunner) posArgsHandler() argsHandler { } // Handle task parameters otherwise. - var seen = make(map[jobTaskType]bool) + seen := make(map[jobTaskType]bool) for _, t := range job.Tasks { if t.NotebookTask != nil { seen[jobTaskTypeNotebook] = true diff --git a/bundle/run/job_options.go b/bundle/run/job_options.go index c359e79eb..6a03dff95 100644 --- a/bundle/run/job_options.go +++ b/bundle/run/job_options.go @@ -80,7 +80,7 @@ func (o *JobOptions) validatePipelineParams() (*jobs.PipelineParams, error) { return nil, nil } - var defaultErr = fmt.Errorf("job run argument --pipeline-params only supports `full_refresh=`") + defaultErr := fmt.Errorf("job run argument --pipeline-params only supports `full_refresh=`") v, ok := o.pipelineParams["full_refresh"] if !ok { return nil, defaultErr diff --git a/bundle/run/job_test.go b/bundle/run/job_test.go index 369c546aa..5d19ca4ff 100644 --- a/bundle/run/job_test.go +++ b/bundle/run/job_test.go @@ -42,7 +42,8 @@ func TestConvertPythonParams(t *testing.T) { opts := &Options{ Job: JobOptions{}, } - runner.convertPythonParams(opts) + err := runner.convertPythonParams(opts) + require.NoError(t, err) require.NotContains(t, opts.Job.notebookParams, "__python_params") opts = &Options{ @@ -50,7 +51,8 @@ func TestConvertPythonParams(t *testing.T) { pythonParams: []string{"param1", "param2", "param3"}, }, } - runner.convertPythonParams(opts) + err = runner.convertPythonParams(opts) + require.NoError(t, err) require.Contains(t, opts.Job.notebookParams, "__python_params") require.Equal(t, opts.Job.notebookParams["__python_params"], `["param1","param2","param3"]`) } @@ -158,7 +160,7 @@ func TestJobRunnerRestart(t *testing.T) { m := mocks.NewMockWorkspaceClient(t) b.SetWorkpaceClient(m.WorkspaceClient) ctx := context.Background() - ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, 
&bytes.Buffer{}, &bytes.Buffer{}, "", "")) + ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "")) ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend)) jobApi := m.GetMockJobsAPI() @@ -229,7 +231,7 @@ func TestJobRunnerRestartForContinuousUnpausedJobs(t *testing.T) { m := mocks.NewMockWorkspaceClient(t) b.SetWorkpaceClient(m.WorkspaceClient) ctx := context.Background() - ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) + ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend)) jobApi := m.GetMockJobsAPI() diff --git a/bundle/run/output/task.go b/bundle/run/output/task.go index 402e4d66a..1ef78a8c3 100644 --- a/bundle/run/output/task.go +++ b/bundle/run/output/task.go @@ -7,13 +7,15 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" ) -type NotebookOutput jobs.NotebookOutput -type DbtOutput jobs.DbtOutput -type SqlOutput jobs.SqlOutput -type LogsOutput struct { - Logs string `json:"logs"` - LogsTruncated bool `json:"logs_truncated"` -} +type ( + NotebookOutput jobs.NotebookOutput + DbtOutput jobs.DbtOutput + SqlOutput jobs.SqlOutput + LogsOutput struct { + Logs string `json:"logs"` + LogsTruncated bool `json:"logs_truncated"` + } +) func structToString(val any) (string, error) { b, err := json.MarshalIndent(val, "", " ") diff --git a/bundle/run/pipeline.go b/bundle/run/pipeline.go index ffe012843..a0e7d1e1e 100644 --- a/bundle/run/pipeline.go +++ b/bundle/run/pipeline.go @@ -37,11 +37,11 @@ func (r *pipelineRunner) logEvent(ctx context.Context, event pipelines.PipelineE } } if logString != "" { - log.Errorf(ctx, fmt.Sprintf("[%s] %s", event.EventType, logString)) + log.Errorf(ctx, "[%s] %s", event.EventType, logString) } } -func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId string, updateId string) error { +func (r *pipelineRunner) logErrorEvent(ctx context.Context, pipelineId, updateId string) error { w := r.bundle.WorkspaceClient() // Note: For a 100 percent correct and complete solution we should use the @@ -85,7 +85,7 @@ func (r *pipelineRunner) Name() string { } func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) { - var pipelineID = r.pipeline.ID + pipelineID := r.pipeline.ID // Include resource key in logger. 
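
Aside on the log.Errorf change in pipeline.go above: wrapping the message with fmt.Sprintf and then passing the result as the format argument double-formats it, so any percent sign in event data is re-parsed as a verb, and govet's printf check rejects the non-constant format string. A stdlib sketch of the failure mode and the fix:

```go
package main

import (
	"fmt"
	"log"
)

func main() {
	msg := "loaded 100% of events" // event data containing a stray '%'

	// Buggy: the pre-formatted string is re-parsed as a format string,
	// so the '%' yields "%!o(MISSING)" noise (and govet flags the call).
	log.Printf(fmt.Sprintf("[%s] %s", "update_progress", msg))

	// Fixed, mirroring the hunk above: one formatting pass, data stays data.
	log.Printf("[%s] %s", "update_progress", msg)
}
```
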
ctx = log.NewContext(ctx, log.GetLogger(ctx).With("resource", r.Key())) @@ -132,7 +132,7 @@ func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutp } for _, event := range events { progressLogger.Log(&event) - log.Infof(ctx, event.String()) + log.Info(ctx, event.String()) } update, err := w.Pipelines.GetUpdateByPipelineIdAndUpdateId(ctx, pipelineID, updateID) @@ -173,7 +173,6 @@ func (r *pipelineRunner) Cancel(ctx context.Context) error { wait, err := w.Pipelines.Stop(ctx, pipelines.StopRequest{ PipelineId: r.pipeline.ID, }) - if err != nil { return err } diff --git a/bundle/run/pipeline_test.go b/bundle/run/pipeline_test.go index e4608061c..66f9d86be 100644 --- a/bundle/run/pipeline_test.go +++ b/bundle/run/pipeline_test.go @@ -76,7 +76,7 @@ func TestPipelineRunnerRestart(t *testing.T) { } b.SetWorkpaceClient(m.WorkspaceClient) ctx := context.Background() - ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) + ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) ctx = cmdio.NewContext(ctx, cmdio.NewLogger(flags.ModeAppend)) mockWait := &pipelines.WaitGetPipelineIdle[struct{}]{ diff --git a/bundle/run/progress/pipeline.go b/bundle/run/progress/pipeline.go index 4a256e76c..b82dd7abd 100644 --- a/bundle/run/progress/pipeline.go +++ b/bundle/run/progress/pipeline.go @@ -51,7 +51,7 @@ type UpdateTracker struct { w *databricks.WorkspaceClient } -func NewUpdateTracker(pipelineId string, updateId string, w *databricks.WorkspaceClient) *UpdateTracker { +func NewUpdateTracker(pipelineId, updateId string, w *databricks.WorkspaceClient) *UpdateTracker { return &UpdateTracker{ w: w, PipelineId: pipelineId, diff --git a/bundle/schema/embed_test.go b/bundle/schema/embed_test.go index e4b45baa5..ff2e7651b 100644 --- a/bundle/schema/embed_test.go +++ b/bundle/schema/embed_test.go @@ -58,16 +58,6 @@ func TestJsonSchema(t *testing.T) { assert.NotEmpty(t, pipeline.AnyOf[0].Properties[field].Description) } - // Assert enum values are loaded - schedule := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "pipelines.RestartWindow") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "MONDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "TUESDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "WEDNESDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "THURSDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "FRIDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SATURDAY") - assert.Contains(t, schedule.AnyOf[0].Properties["days_of_week"].Enum, "SUNDAY") - providers := walk(s.Definitions, "github.com", "databricks", "databricks-sdk-go", "service", "jobs.GitProvider") assert.Contains(t, providers.Enum, "gitHub") assert.Contains(t, providers.Enum, "bitbucketCloud") diff --git a/bundle/schema/jsonschema.json b/bundle/schema/jsonschema.json index f791b8440..e813e4406 100644 --- a/bundle/schema/jsonschema.json +++ b/bundle/schema/jsonschema.json @@ -2888,7 +2888,7 @@ "anyOf": [ { "type": "object", - "description": "Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If not specified, the job/pipeline runs as the user who created the job/pipeline.\n\nEither `user_name` or `service_principal_name` should be specified. 
If not, an error is thrown.", + "description": "Write-only setting. Specifies the user or service principal that the job runs as. If not specified, the job runs as the user who created the job.\n\nEither `user_name` or `service_principal_name` should be specified. If not, an error is thrown.", "properties": { "service_principal_name": { "description": "Application ID of an active service principal. Setting this field requires the `servicePrincipal/user` role.", @@ -4436,16 +4436,7 @@ "properties": { "days_of_week": { "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used.", - "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindowDaysOfWeek", - "enum": [ - "MONDAY", - "TUESDAY", - "WEDNESDAY", - "THURSDAY", - "FRIDAY", - "SATURDAY", - "SUNDAY" - ] + "$ref": "#/$defs/github.com/databricks/databricks-sdk-go/service/pipelines.RestartWindowDaysOfWeek" }, "start_hour": { "description": "An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.\nContinuous pipeline restart is triggered only within a five-hour window starting at this hour.", @@ -4468,7 +4459,17 @@ ] }, "pipelines.RestartWindowDaysOfWeek": { - "type": "string" + "type": "string", + "description": "Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour).\nIf not specified all days of the week will be used.", + "enum": [ + "MONDAY", + "TUESDAY", + "WEDNESDAY", + "THURSDAY", + "FRIDAY", + "SATURDAY", + "SUNDAY" + ] }, "pipelines.SchemaSpec": { "anyOf": [ diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go index 920577146..113a6140b 100644 --- a/bundle/tests/run_as_test.go +++ b/bundle/tests/run_as_test.go @@ -93,7 +93,6 @@ func TestRunAsForAllowedWithTargetOverride(t *testing.T) { assert.Equal(t, ml.Model{Name: "skynet"}, *b.Config.Resources.Models["model_one"].Model) assert.Equal(t, catalog.CreateRegisteredModelRequest{Name: "skynet (in UC)"}, *b.Config.Resources.RegisteredModels["model_two"].CreateRegisteredModelRequest) assert.Equal(t, ml.Experiment{Name: "experiment_one"}, *b.Config.Resources.Experiments["experiment_one"].Experiment) - } func TestRunAsErrorForPipelines(t *testing.T) { @@ -220,7 +219,6 @@ func TestRunAsErrorNeitherUserOrSpSpecified(t *testing.T) { for _, tc := range tcases { t.Run(tc.name, func(t *testing.T) { - bundlePath := fmt.Sprintf("./run_as/not_allowed/neither_sp_nor_user/%s", tc.name) b := load(t, bundlePath) diff --git a/bundle/tests/suggest_target_test.go b/bundle/tests/suggest_target_test.go index 8fb130409..02905d779 100644 --- a/bundle/tests/suggest_target_test.go +++ b/bundle/tests/suggest_target_test.go @@ -1,22 +1,22 @@ package config_tests import ( - "path/filepath" + "context" "testing" - "github.com/databricks/cli/cmd/root" - assert "github.com/databricks/cli/libs/dyn/dynassert" - - "github.com/databricks/cli/internal" + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/require" ) func TestSuggestTargetIfWrongPassed(t *testing.T) { - t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace")) - stdoutBytes, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect") - stdout := stdoutBytes.String() + b := load(t, "target_overrides/workspace") - assert.Error(t, root.ErrAlreadyPrinted, err) - assert.Contains(t, stdout, "Available targets:") - 
assert.Contains(t, stdout, "development") - assert.Contains(t, stdout, "staging") + ctx := context.Background() + diags := bundle.Apply(ctx, b, mutator.SelectTarget("incorrect")) + err := diags.Error() + require.Error(t, err) + require.Contains(t, err.Error(), "Available targets:") + require.Contains(t, err.Error(), "development") + require.Contains(t, err.Error(), "staging") } diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 9451c5a04..37d488fad 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -151,7 +151,7 @@ func TestVariablesWithTargetLookupOverrides(t *testing.T) { } func TestVariableTargetOverrides(t *testing.T) { - var tcases = []struct { + tcases := []struct { targetName string pipelineName string pipelineContinuous bool diff --git a/bundle/trampoline/python_dbr_warning.go b/bundle/trampoline/python_dbr_warning.go index cf3e9aeb3..0318df7c9 100644 --- a/bundle/trampoline/python_dbr_warning.go +++ b/bundle/trampoline/python_dbr_warning.go @@ -14,8 +14,7 @@ import ( "golang.org/x/mod/semver" ) -type wrapperWarning struct { -} +type wrapperWarning struct{} func WrapperWarning() bundle.Mutator { return &wrapperWarning{} @@ -62,7 +61,6 @@ func hasIncompatibleWheelTasks(ctx context.Context, b *bundle.Bundle) bool { if task.ExistingClusterId != "" { version, err := getSparkVersionForCluster(ctx, b.WorkspaceClient(), task.ExistingClusterId) - // If there's error getting spark version for cluster, do not mark it as incompatible if err != nil { log.Warnf(ctx, "unable to get spark version for cluster %s, err: %s", task.ExistingClusterId, err.Error()) diff --git a/bundle/trampoline/python_wheel_test.go b/bundle/trampoline/python_wheel_test.go index 517be35e4..d75a3eca3 100644 --- a/bundle/trampoline/python_wheel_test.go +++ b/bundle/trampoline/python_wheel_test.go @@ -127,7 +127,8 @@ func TestNoPanicWithNoPythonWheelTasks(t *testing.T) { Tasks: []jobs.Task{ { TaskKey: "notebook_task", - NotebookTask: &jobs.NotebookTask{}}, + NotebookTask: &jobs.NotebookTask{}, + }, }, }, }, diff --git a/bundle/trampoline/trampoline.go b/bundle/trampoline/trampoline.go index 1dc1c4463..600ce3d9c 100644 --- a/bundle/trampoline/trampoline.go +++ b/bundle/trampoline/trampoline.go @@ -62,7 +62,7 @@ func (m *trampoline) generateNotebookWrapper(ctx context.Context, b *bundle.Bund notebookName := fmt.Sprintf("notebook_%s_%s", task.JobKey, task.Task.TaskKey) localNotebookPath := filepath.Join(internalDir, notebookName+".py") - err = os.MkdirAll(filepath.Dir(localNotebookPath), 0755) + err = os.MkdirAll(filepath.Dir(localNotebookPath), 0o755) if err != nil { return err } diff --git a/bundle/trampoline/trampoline_test.go b/bundle/trampoline/trampoline_test.go index 4682d8fa0..3c5d18570 100644 --- a/bundle/trampoline/trampoline_test.go +++ b/bundle/trampoline/trampoline_test.go @@ -52,7 +52,8 @@ func TestGenerateTrampoline(t *testing.T) { PythonWheelTask: &jobs.PythonWheelTask{ PackageName: "test", EntryPoint: "run", - }}, + }, + }, } b := &bundle.Bundle{ diff --git a/cmd/api/api.go b/cmd/api/api.go index d33939a52..c3a3eb0b6 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -39,7 +39,7 @@ func makeCommand(method string) *cobra.Command { Args: root.ExactArgs(1), Short: fmt.Sprintf("Perform %s request", method), RunE: func(cmd *cobra.Command, args []string) error { - var path = args[0] + path := args[0] var request any diags := payload.Unmarshal(&request) diff --git a/cmd/auth/describe.go b/cmd/auth/describe.go index 3a6e3d5d7..faaf64f8f 100644 --- 
a/cmd/auth/describe.go +++ b/cmd/auth/describe.go @@ -59,7 +59,6 @@ func newDescribeCommand() *cobra.Command { isAccount, err := root.MustAnyClient(cmd, args) return root.ConfigUsed(cmd.Context()), isAccount, err }) - if err != nil { return err } @@ -141,7 +140,10 @@ func render(ctx context.Context, cmd *cobra.Command, status *authStatus, templat if err != nil { return err } - cmd.OutOrStdout().Write(buf) + _, err = cmd.OutOrStdout().Write(buf) + if err != nil { + return err + } default: return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } diff --git a/cmd/auth/describe_test.go b/cmd/auth/describe_test.go index d0260abc7..7f5f900d4 100644 --- a/cmd/auth/describe_test.go +++ b/cmd/auth/describe_test.go @@ -31,7 +31,8 @@ func TestGetWorkspaceAuthStatus(t *testing.T) { cmd.Flags().String("host", "", "") cmd.Flags().String("profile", "", "") - cmd.Flag("profile").Value.Set("my-profile") + err := cmd.Flag("profile").Value.Set("my-profile") + require.NoError(t, err) cmd.Flag("profile").Changed = true cfg := &config.Config{ @@ -39,14 +40,16 @@ func TestGetWorkspaceAuthStatus(t *testing.T) { } m.WorkspaceClient.Config = cfg t.Setenv("DATABRICKS_AUTH_TYPE", "azure-cli") - config.ConfigAttributes.Configure(cfg) + err = config.ConfigAttributes.Configure(cfg) + require.NoError(t, err) status, err := getAuthStatus(cmd, []string{}, showSensitive, func(cmd *cobra.Command, args []string) (*config.Config, bool, error) { - config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ + err := config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ "host": "https://test.com", "token": "test-token", "auth_type": "azure-cli", }) + require.NoError(t, err) return cfg, false, nil }) require.NoError(t, err) @@ -81,7 +84,8 @@ func TestGetWorkspaceAuthStatusError(t *testing.T) { cmd.Flags().String("host", "", "") cmd.Flags().String("profile", "", "") - cmd.Flag("profile").Value.Set("my-profile") + err := cmd.Flag("profile").Value.Set("my-profile") + require.NoError(t, err) cmd.Flag("profile").Changed = true cfg := &config.Config{ @@ -89,10 +93,11 @@ func TestGetWorkspaceAuthStatusError(t *testing.T) { } m.WorkspaceClient.Config = cfg t.Setenv("DATABRICKS_AUTH_TYPE", "azure-cli") - config.ConfigAttributes.Configure(cfg) + err = config.ConfigAttributes.Configure(cfg) + require.NoError(t, err) status, err := getAuthStatus(cmd, []string{}, showSensitive, func(cmd *cobra.Command, args []string) (*config.Config, bool, error) { - config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ + err = config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ "host": "https://test.com", "token": "test-token", "auth_type": "azure-cli", @@ -128,7 +133,8 @@ func TestGetWorkspaceAuthStatusSensitive(t *testing.T) { cmd.Flags().String("host", "", "") cmd.Flags().String("profile", "", "") - cmd.Flag("profile").Value.Set("my-profile") + err := cmd.Flag("profile").Value.Set("my-profile") + require.NoError(t, err) cmd.Flag("profile").Changed = true cfg := &config.Config{ @@ -136,10 +142,11 @@ func TestGetWorkspaceAuthStatusSensitive(t *testing.T) { } m.WorkspaceClient.Config = cfg t.Setenv("DATABRICKS_AUTH_TYPE", "azure-cli") - config.ConfigAttributes.Configure(cfg) + err = config.ConfigAttributes.Configure(cfg) + require.NoError(t, err) status, err := getAuthStatus(cmd, []string{}, showSensitive, func(cmd *cobra.Command, args []string) (*config.Config, bool, error) { - config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ + err = 
config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ "host": "https://test.com", "token": "test-token", "auth_type": "azure-cli", @@ -171,7 +178,8 @@ func TestGetAccountAuthStatus(t *testing.T) { cmd.Flags().String("host", "", "") cmd.Flags().String("profile", "", "") - cmd.Flag("profile").Value.Set("my-profile") + err := cmd.Flag("profile").Value.Set("my-profile") + require.NoError(t, err) cmd.Flag("profile").Changed = true cfg := &config.Config{ @@ -179,13 +187,14 @@ func TestGetAccountAuthStatus(t *testing.T) { } m.AccountClient.Config = cfg t.Setenv("DATABRICKS_AUTH_TYPE", "azure-cli") - config.ConfigAttributes.Configure(cfg) + err = config.ConfigAttributes.Configure(cfg) + require.NoError(t, err) wsApi := m.GetMockWorkspacesAPI() wsApi.EXPECT().List(mock.Anything).Return(nil, nil) status, err := getAuthStatus(cmd, []string{}, showSensitive, func(cmd *cobra.Command, args []string) (*config.Config, bool, error) { - config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ + err = config.ConfigAttributes.ResolveFromStringMap(cfg, map[string]string{ "account_id": "test-account-id", "username": "test-user", "host": "https://test.com", diff --git a/cmd/auth/env.go b/cmd/auth/env.go index e72d15399..52b7cbbfd 100644 --- a/cmd/auth/env.go +++ b/cmd/auth/env.go @@ -138,7 +138,7 @@ func newEnvCommand() *cobra.Command { if err != nil { return err } - cmd.OutOrStdout().Write(raw) + _, _ = cmd.OutOrStdout().Write(raw) return nil } diff --git a/cmd/auth/login.go b/cmd/auth/login.go index 79b795468..c98676599 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -29,8 +29,10 @@ func promptForProfile(ctx context.Context, defaultValue string) (string, error) return prompt.Run() } -const minimalDbConnectVersion = "13.1" -const defaultTimeout = 1 * time.Hour +const ( + minimalDbConnectVersion = "13.1" + defaultTimeout = 1 * time.Hour +) func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { defaultConfigPath := "~/.databrickscfg" diff --git a/cmd/auth/token.go b/cmd/auth/token.go index 3f9af43fa..fbf8b68f6 100644 --- a/cmd/auth/token.go +++ b/cmd/auth/token.go @@ -94,7 +94,7 @@ func newTokenCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { if err != nil { return err } - cmd.OutOrStdout().Write(raw) + _, _ = cmd.OutOrStdout().Write(raw) return nil } diff --git a/cmd/bundle/debug/terraform.go b/cmd/bundle/debug/terraform.go index 843ecac4e..c7d49ebb2 100644 --- a/cmd/bundle/debug/terraform.go +++ b/cmd/bundle/debug/terraform.go @@ -60,13 +60,13 @@ For more information about filesystem mirrors, see the Terraform documentation: } switch root.OutputType(cmd) { case flags.OutputText: - cmdio.Render(cmd.Context(), dependencies.Terraform) + _ = cmdio.Render(cmd.Context(), dependencies.Terraform) case flags.OutputJSON: buf, err := json.MarshalIndent(dependencies, "", " ") if err != nil { return err } - cmd.OutOrStdout().Write(buf) + _, _ = cmd.OutOrStdout().Write(buf) default: return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } diff --git a/cmd/bundle/generate/dashboard.go b/cmd/bundle/generate/dashboard.go index 4a538a293..f196bbe62 100644 --- a/cmd/bundle/generate/dashboard.go +++ b/cmd/bundle/generate/dashboard.go @@ -158,7 +158,7 @@ func (d *dashboard) saveSerializedDashboard(_ context.Context, b *bundle.Bundle, } // Make sure the output directory exists. 
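
Aside: the writes rewritten above show the two idioms errcheck accepts — propagate the error, or discard it explicitly with blank identifiers so the intent is visible to reviewers and linters alike. A self-contained stdlib sketch of both patterns:

```go
package main

import (
	"fmt"
	"os"
)

// Pattern 1: propagate the error to the caller.
func write(p []byte) error {
	if _, err := os.Stdout.Write(p); err != nil {
		return err
	}
	return nil
}

// Pattern 2: discard explicitly; "_, _ =" documents a deliberate choice.
func bestEffortWrite(p []byte) {
	_, _ = os.Stdout.Write(p)
}

func main() {
	if err := write([]byte("hello\n")); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
	bestEffortWrite([]byte("world\n"))
}
```
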
- if err := os.MkdirAll(filepath.Dir(filename), 0755); err != nil { + if err := os.MkdirAll(filepath.Dir(filename), 0o755); err != nil { return err } @@ -183,7 +183,7 @@ func (d *dashboard) saveSerializedDashboard(_ context.Context, b *bundle.Bundle, } fmt.Printf("Writing dashboard to %q\n", rel) - return os.WriteFile(filename, data, 0644) + return os.WriteFile(filename, data, 0o644) } func (d *dashboard) saveConfiguration(ctx context.Context, b *bundle.Bundle, dashboard *dashboards.Dashboard, key string) error { @@ -210,7 +210,7 @@ func (d *dashboard) saveConfiguration(ctx context.Context, b *bundle.Bundle, das } // Make sure the output directory exists. - if err := os.MkdirAll(d.resourceDir, 0755); err != nil { + if err := os.MkdirAll(d.resourceDir, 0o755); err != nil { return err } diff --git a/cmd/bundle/generate/dashboard_test.go b/cmd/bundle/generate/dashboard_test.go index 6741e6a39..f1161950b 100644 --- a/cmd/bundle/generate/dashboard_test.go +++ b/cmd/bundle/generate/dashboard_test.go @@ -67,9 +67,10 @@ func TestDashboard_ExistingID_Nominal(t *testing.T) { ctx := bundle.Context(context.Background(), b) cmd := NewGenerateDashboardCommand() cmd.SetContext(ctx) - cmd.Flag("existing-id").Value.Set("f00dcafe") + err := cmd.Flag("existing-id").Value.Set("f00dcafe") + require.NoError(t, err) - err := cmd.RunE(cmd, []string{}) + err = cmd.RunE(cmd, []string{}) require.NoError(t, err) // Assert the contents of the generated configuration @@ -105,9 +106,10 @@ func TestDashboard_ExistingID_NotFound(t *testing.T) { ctx := bundle.Context(context.Background(), b) cmd := NewGenerateDashboardCommand() cmd.SetContext(ctx) - cmd.Flag("existing-id").Value.Set("f00dcafe") + err := cmd.Flag("existing-id").Value.Set("f00dcafe") + require.NoError(t, err) - err := cmd.RunE(cmd, []string{}) + err = cmd.RunE(cmd, []string{}) require.Error(t, err) } @@ -137,9 +139,10 @@ func TestDashboard_ExistingPath_Nominal(t *testing.T) { ctx := bundle.Context(context.Background(), b) cmd := NewGenerateDashboardCommand() cmd.SetContext(ctx) - cmd.Flag("existing-path").Value.Set("/path/to/dashboard") + err := cmd.Flag("existing-path").Value.Set("/path/to/dashboard") + require.NoError(t, err) - err := cmd.RunE(cmd, []string{}) + err = cmd.RunE(cmd, []string{}) require.NoError(t, err) // Assert the contents of the generated configuration @@ -175,8 +178,9 @@ func TestDashboard_ExistingPath_NotFound(t *testing.T) { ctx := bundle.Context(context.Background(), b) cmd := NewGenerateDashboardCommand() cmd.SetContext(ctx) - cmd.Flag("existing-path").Value.Set("/path/to/dashboard") + err := cmd.Flag("existing-path").Value.Set("/path/to/dashboard") + require.NoError(t, err) - err := cmd.RunE(cmd, []string{}) + err = cmd.RunE(cmd, []string{}) require.Error(t, err) } diff --git a/cmd/bundle/generate/generate_test.go b/cmd/bundle/generate/generate_test.go index bc1549e64..896b7de51 100644 --- a/cmd/bundle/generate/generate_test.go +++ b/cmd/bundle/generate/generate_test.go @@ -78,13 +78,13 @@ func TestGeneratePipelineCommand(t *testing.T) { workspaceApi.EXPECT().Download(mock.Anything, "/test/file.py", mock.Anything).Return(pyContent, nil) cmd.SetContext(bundle.Context(context.Background(), b)) - cmd.Flag("existing-pipeline-id").Value.Set("test-pipeline") + require.NoError(t, cmd.Flag("existing-pipeline-id").Value.Set("test-pipeline")) configDir := filepath.Join(root, "resources") - cmd.Flag("config-dir").Value.Set(configDir) + require.NoError(t, cmd.Flag("config-dir").Value.Set(configDir)) srcDir := filepath.Join(root, "src") - 
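
Aside on the 0755 to 0o755 rewrites recurring through these hunks: gofumpt normalizes legacy octal literals to the explicit 0o prefix introduced in Go 1.13; the value is unchanged, only the spelling:

```go
package main

import "fmt"

func main() {
	fmt.Println(0755 == 0o755)         // true: identical bits (rwxr-xr-x)
	fmt.Printf("%o %o\n", 0644, 0o644) // 644 644
}
```
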
cmd.Flag("source-dir").Value.Set(srcDir) + require.NoError(t, cmd.Flag("source-dir").Value.Set(srcDir)) var key string cmd.Flags().StringVar(&key, "key", "test_pipeline", "") @@ -174,13 +174,13 @@ func TestGenerateJobCommand(t *testing.T) { workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil) cmd.SetContext(bundle.Context(context.Background(), b)) - cmd.Flag("existing-job-id").Value.Set("1234") + require.NoError(t, cmd.Flag("existing-job-id").Value.Set("1234")) configDir := filepath.Join(root, "resources") - cmd.Flag("config-dir").Value.Set(configDir) + require.NoError(t, cmd.Flag("config-dir").Value.Set(configDir)) srcDir := filepath.Join(root, "src") - cmd.Flag("source-dir").Value.Set(srcDir) + require.NoError(t, cmd.Flag("source-dir").Value.Set(srcDir)) var key string cmd.Flags().StringVar(&key, "key", "test_job", "") @@ -217,7 +217,7 @@ func TestGenerateJobCommand(t *testing.T) { } func touchEmptyFile(t *testing.T, path string) { - err := os.MkdirAll(filepath.Dir(path), 0700) + err := os.MkdirAll(filepath.Dir(path), 0o700) require.NoError(t, err) f, err := os.Create(path) require.NoError(t, err) @@ -279,13 +279,13 @@ func TestGenerateJobCommandOldFileRename(t *testing.T) { workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil) cmd.SetContext(bundle.Context(context.Background(), b)) - cmd.Flag("existing-job-id").Value.Set("1234") + require.NoError(t, cmd.Flag("existing-job-id").Value.Set("1234")) configDir := filepath.Join(root, "resources") - cmd.Flag("config-dir").Value.Set(configDir) + require.NoError(t, cmd.Flag("config-dir").Value.Set(configDir)) srcDir := filepath.Join(root, "src") - cmd.Flag("source-dir").Value.Set(srcDir) + require.NoError(t, cmd.Flag("source-dir").Value.Set(srcDir)) var key string cmd.Flags().StringVar(&key, "key", "test_job", "") @@ -295,7 +295,7 @@ func TestGenerateJobCommandOldFileRename(t *testing.T) { touchEmptyFile(t, oldFilename) // Having an existing files require --force flag to regenerate them - cmd.Flag("force").Value.Set("true") + require.NoError(t, cmd.Flag("force").Value.Set("true")) err := cmd.RunE(cmd, []string{}) require.NoError(t, err) diff --git a/cmd/bundle/generate/utils.go b/cmd/bundle/generate/utils.go index 65f692419..8e3764e35 100644 --- a/cmd/bundle/generate/utils.go +++ b/cmd/bundle/generate/utils.go @@ -87,7 +87,7 @@ func (n *downloader) markNotebookForDownload(ctx context.Context, notebookPath * } func (n *downloader) FlushToDisk(ctx context.Context, force bool) error { - err := os.MkdirAll(n.sourceDir, 0755) + err := os.MkdirAll(n.sourceDir, 0o755) if err != nil { return err } @@ -134,7 +134,7 @@ func (n *downloader) FlushToDisk(ctx context.Context, force bool) error { return errs.Wait() } -func newDownloader(w *databricks.WorkspaceClient, sourceDir string, configDir string) *downloader { +func newDownloader(w *databricks.WorkspaceClient, sourceDir, configDir string) *downloader { return &downloader{ files: make(map[string]string), w: w, diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 7a92766d9..3bcebddd5 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -159,13 +159,19 @@ task or a Python wheel task, the second example applies. 
if err != nil { return err } - cmd.OutOrStdout().Write([]byte(resultString)) + _, err = cmd.OutOrStdout().Write([]byte(resultString)) + if err != nil { + return err + } case flags.OutputJSON: b, err := json.MarshalIndent(output, "", " ") if err != nil { return err } - cmd.OutOrStdout().Write(b) + _, err = cmd.OutOrStdout().Write(b) + if err != nil { + return err + } default: return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } diff --git a/cmd/bundle/summary.go b/cmd/bundle/summary.go index 8c34dd612..7c669c845 100644 --- a/cmd/bundle/summary.go +++ b/cmd/bundle/summary.go @@ -73,7 +73,7 @@ func newSummaryCommand() *cobra.Command { if err != nil { return err } - cmd.OutOrStdout().Write(buf) + _, _ = cmd.OutOrStdout().Write(buf) default: return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 5331e7e7b..3b50cc258 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -20,7 +20,7 @@ func renderJsonOutput(cmd *cobra.Command, b *bundle.Bundle, diags diag.Diagnosti if err != nil { return err } - cmd.OutOrStdout().Write(buf) + _, _ = cmd.OutOrStdout().Write(buf) return diags.Error() } diff --git a/cmd/configure/configure_test.go b/cmd/configure/configure_test.go index a127fe57a..e2f6c1e29 100644 --- a/cmd/configure/configure_test.go +++ b/cmd/configure/configure_test.go @@ -31,7 +31,7 @@ func setup(t *testing.T) string { return tempHomeDir } -func getTempFileWithContent(t *testing.T, tempHomeDir string, content string) *os.File { +func getTempFileWithContent(t *testing.T, tempHomeDir, content string) *os.File { inp, err := os.CreateTemp(tempHomeDir, "input") assert.NoError(t, err) _, err = inp.WriteString(content) @@ -75,7 +75,7 @@ func TestDefaultConfigureNoInteractive(t *testing.T) { } func TestConfigFileFromEnvNoInteractive(t *testing.T) { - //TODO: Replace with similar test code from go SDK, once we start using it directly + // TODO: Replace with similar test code from go SDK, once we start using it directly ctx := context.Background() tempHomeDir := setup(t) defaultCfgPath := filepath.Join(tempHomeDir, ".databrickscfg") diff --git a/cmd/labs/github/github.go b/cmd/labs/github/github.go index 1dd9fae5e..a67df1022 100644 --- a/cmd/labs/github/github.go +++ b/cmd/labs/github/github.go @@ -12,12 +12,16 @@ import ( "github.com/databricks/cli/libs/log" ) -const gitHubAPI = "https://api.github.com" -const gitHubUserContent = "https://raw.githubusercontent.com" +const ( + gitHubAPI = "https://api.github.com" + gitHubUserContent = "https://raw.githubusercontent.com" +) // Placeholders to use as unique keys in context.Context. 
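Note: `errcheck` also covers the return values of `io.Writer.Write`. The hunks above take two routes: propagate the error where the caller can act on it (`run.go`), or discard it explicitly with `_, _ =` where a failed stdout write has no useful recovery (`summary.go`, `validate.go`). A minimal sketch of both styles:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// writeResult propagates the Write error to the caller, matching the
// style run.go adopts in this change.
func writeResult(w io.Writer, data []byte) error {
	_, err := w.Write(data)
	if err != nil {
		return err
	}
	return nil
}

func main() {
	if err := writeResult(os.Stdout, []byte("hello\n")); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// Where a write failure is not actionable, the error is discarded
	// explicitly so errcheck can see the decision was deliberate.
	_, _ = os.Stdout.Write([]byte("best-effort output\n"))
}
```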
-var apiOverride int -var userContentOverride int +var ( + apiOverride int + userContentOverride int +) func WithApiOverride(ctx context.Context, override string) context.Context { return context.WithValue(ctx, &apiOverride, override) diff --git a/cmd/labs/github/ref_test.go b/cmd/labs/github/ref_test.go index 2a9ffcc5b..cc27d1e81 100644 --- a/cmd/labs/github/ref_test.go +++ b/cmd/labs/github/ref_test.go @@ -7,12 +7,14 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestFileFromRef(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/databrickslabs/ucx/main/README.md" { - w.Write([]byte(`abc`)) + _, err := w.Write([]byte(`abc`)) + require.NoError(t, err) return } t.Logf("Requested: %s", r.URL.Path) @@ -31,7 +33,8 @@ func TestFileFromRef(t *testing.T) { func TestDownloadZipball(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/repos/databrickslabs/ucx/zipball/main" { - w.Write([]byte(`abc`)) + _, err := w.Write([]byte(`abc`)) + require.NoError(t, err) return } t.Logf("Requested: %s", r.URL.Path) diff --git a/cmd/labs/github/releases_test.go b/cmd/labs/github/releases_test.go index ea24a1e2e..9c3d7a959 100644 --- a/cmd/labs/github/releases_test.go +++ b/cmd/labs/github/releases_test.go @@ -7,12 +7,14 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestLoadsReleasesForCLI(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/repos/databricks/cli/releases" { - w.Write([]byte(`[{"tag_name": "v1.2.3"}, {"tag_name": "v1.2.2"}]`)) + _, err := w.Write([]byte(`[{"tag_name": "v1.2.3"}, {"tag_name": "v1.2.2"}]`)) + require.NoError(t, err) return } t.Logf("Requested: %s", r.URL.Path) diff --git a/cmd/labs/github/repositories_test.go b/cmd/labs/github/repositories_test.go index 4f2fef3e1..412b440bc 100644 --- a/cmd/labs/github/repositories_test.go +++ b/cmd/labs/github/repositories_test.go @@ -7,12 +7,14 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestRepositories(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/users/databrickslabs/repos" { - w.Write([]byte(`[{"name": "x"}]`)) + _, err := w.Write([]byte(`[{"name": "x"}]`)) + require.NoError(t, err) return } t.Logf("Requested: %s", r.URL.Path) diff --git a/cmd/labs/installed_test.go b/cmd/labs/installed_test.go index 00692f796..3c38e5e11 100644 --- a/cmd/labs/installed_test.go +++ b/cmd/labs/installed_test.go @@ -4,14 +4,14 @@ import ( "context" "testing" - "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/env" ) func TestListsInstalledProjects(t *testing.T) { ctx := context.Background() ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home") - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "installed") + r := testcli.NewRunner(t, ctx, "labs", "installed") r.RunAndExpectOutput(` Name Description Version blueprint Blueprint Project v0.3.15 diff --git a/cmd/labs/list_test.go b/cmd/labs/list_test.go index 925b984ab..4388fdd0e 100644 --- a/cmd/labs/list_test.go +++ b/cmd/labs/list_test.go @@ -4,7 +4,7 @@ import ( "context" "testing" - "github.com/databricks/cli/internal" + 
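The `github.go` hunk above also shows the context-key idiom this codebase relies on: a package-level variable has a unique address, so `&apiOverride` is a collision-free key for `context.WithValue`, and gofumpt folds the related declarations into a single `var (...)` block. A standalone sketch of the idiom (identifiers are illustrative):

```go
package main

import (
	"context"
	"fmt"
)

// Placeholders whose addresses act as unique context keys; gofumpt
// prefers one grouped block over repeated single-line declarations.
var (
	apiOverride         int
	userContentOverride int
)

func WithAPIOverride(ctx context.Context, override string) context.Context {
	return context.WithValue(ctx, &apiOverride, override)
}

func apiOverrideFrom(ctx context.Context) (string, bool) {
	v, ok := ctx.Value(&apiOverride).(string)
	return v, ok
}

func main() {
	ctx := WithAPIOverride(context.Background(), "http://localhost:8080")
	if v, ok := apiOverrideFrom(ctx); ok {
		fmt.Println("override:", v)
	}
}
```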
"github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/env" "github.com/stretchr/testify/require" ) @@ -12,7 +12,7 @@ import ( func TestListingWorks(t *testing.T) { ctx := context.Background() ctx = env.WithUserHomeDir(ctx, "project/testdata/installed-in-home") - c := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "list") + c := testcli.NewRunner(t, ctx, "labs", "list") stdout, _, err := c.Run() require.NoError(t, err) require.Contains(t, stdout.String(), "ucx") diff --git a/cmd/labs/localcache/jsonfile.go b/cmd/labs/localcache/jsonfile.go index 495743a57..6540e4ac2 100644 --- a/cmd/labs/localcache/jsonfile.go +++ b/cmd/labs/localcache/jsonfile.go @@ -14,8 +14,10 @@ import ( "github.com/databricks/cli/libs/log" ) -const userRW = 0o600 -const ownerRWXworldRX = 0o755 +const ( + userRW = 0o600 + ownerRWXworldRX = 0o755 +) func NewLocalCache[T any](dir, name string, validity time.Duration) LocalCache[T] { return LocalCache[T]{ diff --git a/cmd/labs/project/command_test.go b/cmd/labs/project/command_test.go index 20021879f..453329e1d 100644 --- a/cmd/labs/project/command_test.go +++ b/cmd/labs/project/command_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/python" "github.com/databricks/databricks-sdk-go" @@ -30,7 +30,7 @@ func devEnvContext(t *testing.T) context.Context { func TestRunningBlueprintEcho(t *testing.T) { ctx := devEnvContext(t) - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "echo") + r := testcli.NewRunner(t, ctx, "labs", "blueprint", "echo") var out echoOut r.RunAndParseJSON(&out) assert.Equal(t, "echo", out.Command) @@ -41,14 +41,14 @@ func TestRunningBlueprintEcho(t *testing.T) { func TestRunningBlueprintEchoProfileWrongOverride(t *testing.T) { ctx := devEnvContext(t) - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "echo", "--profile", "workspace-profile") + r := testcli.NewRunner(t, ctx, "labs", "blueprint", "echo", "--profile", "workspace-profile") _, _, err := r.Run() assert.ErrorIs(t, err, databricks.ErrNotAccountClient) } func TestRunningCommand(t *testing.T) { ctx := devEnvContext(t) - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "foo") + r := testcli.NewRunner(t, ctx, "labs", "blueprint", "foo") r.WithStdin() defer r.CloseStdin() @@ -60,7 +60,7 @@ func TestRunningCommand(t *testing.T) { func TestRenderingTable(t *testing.T) { ctx := devEnvContext(t) - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "blueprint", "table") + r := testcli.NewRunner(t, ctx, "labs", "blueprint", "table") r.RunAndExpectOutput(` Key Value First Second diff --git a/cmd/labs/project/entrypoint.go b/cmd/labs/project/entrypoint.go index 99edf83c8..2bed49145 100644 --- a/cmd/labs/project/entrypoint.go +++ b/cmd/labs/project/entrypoint.go @@ -30,10 +30,12 @@ type Entrypoint struct { IsBundleAware bool `yaml:"is_bundle_aware,omitempty"` } -var ErrNoLoginConfig = errors.New("no login configuration found") -var ErrMissingClusterID = errors.New("missing a cluster compatible with Databricks Connect") -var ErrMissingWarehouseID = errors.New("missing a SQL warehouse") -var ErrNotInTTY = errors.New("not in an interactive terminal") +var ( + ErrNoLoginConfig = errors.New("no login configuration found") + ErrMissingClusterID = errors.New("missing a cluster compatible with Databricks Connect") + ErrMissingWarehouseID = 
errors.New("missing a SQL warehouse") + ErrNotInTTY = errors.New("not in an interactive terminal") +) func (e *Entrypoint) NeedsCluster() bool { if e.Installer == nil { @@ -190,9 +192,6 @@ func (e *Entrypoint) getLoginConfig(cmd *cobra.Command) (*loginConfig, *config.C if isNoLoginConfig && !e.IsBundleAware { return nil, nil, ErrNoLoginConfig } - if !isNoLoginConfig && err != nil { - return nil, nil, fmt.Errorf("load: %w", err) - } if e.IsAccountLevel { log.Debugf(ctx, "Using account-level login profile: %s", lc.AccountProfile) cfg, err := e.envAwareConfigWithProfile(ctx, lc.AccountProfile) diff --git a/cmd/labs/project/installer_test.go b/cmd/labs/project/installer_test.go index 1e45fafe6..a69389b31 100644 --- a/cmd/labs/project/installer_test.go +++ b/cmd/labs/project/installer_test.go @@ -19,7 +19,7 @@ import ( "github.com/databricks/cli/cmd/labs/github" "github.com/databricks/cli/cmd/labs/project" - "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/process" "github.com/databricks/cli/libs/python" @@ -29,8 +29,10 @@ import ( "github.com/stretchr/testify/require" ) -const ownerRWXworldRX = 0o755 -const ownerRW = 0o600 +const ( + ownerRWXworldRX = 0o755 + ownerRW = 0o600 +) func zipballFromFolder(src string) ([]byte, error) { var buf bytes.Buffer @@ -117,10 +119,10 @@ func installerContext(t *testing.T, server *httptest.Server) context.Context { func respondWithJSON(t *testing.T, w http.ResponseWriter, v any) { raw, err := json.Marshal(v) - if err != nil { - require.NoError(t, err) - } - w.Write(raw) + require.NoError(t, err) + + _, err = w.Write(raw) + require.NoError(t, err) } type fileTree struct { @@ -167,19 +169,17 @@ func TestInstallerWorksForReleases(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/databrickslabs/blueprint/v0.3.15/labs.yml" { raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml") - if err != nil { - panic(err) - } - w.Write(raw) + require.NoError(t, err) + _, err = w.Write(raw) + require.NoError(t, err) return } if r.URL.Path == "/repos/databrickslabs/blueprint/zipball/v0.3.15" { raw, err := zipballFromFolder("testdata/installed-in-home/.databricks/labs/blueprint/lib") - if err != nil { - panic(err) - } + require.NoError(t, err) w.Header().Add("Content-Type", "application/octet-stream") - w.Write(raw) + _, err = w.Write(raw) + require.NoError(t, err) return } if r.URL.Path == "/api/2.1/clusters/get" { @@ -236,7 +236,7 @@ func TestInstallerWorksForReleases(t *testing.T) { // │ │ │ └── site-packages // │ │ │ ├── ... 
// │ │ │ ├── distutils-precedence.pth - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "install", "blueprint", "--debug") + r := testcli.NewRunner(t, ctx, "labs", "install", "blueprint", "--debug") r.RunAndExpectOutput("setting up important infrastructure") } @@ -314,7 +314,10 @@ func TestInstallerWorksForDevelopment(t *testing.T) { defer server.Close() wd, _ := os.Getwd() - defer os.Chdir(wd) + defer func() { + err := os.Chdir(wd) + require.NoError(t, err) + }() devDir := copyTestdata(t, "testdata/installed-in-home/.databricks/labs/blueprint/lib") err := os.Chdir(devDir) @@ -353,7 +356,7 @@ account_id = abc // └── databrickslabs-blueprint-releases.json // `databricks labs install .` means "verify this installer i'm developing does work" - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "install", ".") + r := testcli.NewRunner(t, ctx, "labs", "install", ".") r.WithStdin() defer r.CloseStdin() @@ -373,19 +376,17 @@ func TestUpgraderWorksForReleases(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.URL.Path == "/databrickslabs/blueprint/v0.4.0/labs.yml" { raw, err := os.ReadFile("testdata/installed-in-home/.databricks/labs/blueprint/lib/labs.yml") - if err != nil { - panic(err) - } - w.Write(raw) + require.NoError(t, err) + _, err = w.Write(raw) + require.NoError(t, err) return } if r.URL.Path == "/repos/databrickslabs/blueprint/zipball/v0.4.0" { raw, err := zipballFromFolder("testdata/installed-in-home/.databricks/labs/blueprint/lib") - if err != nil { - panic(err) - } + require.NoError(t, err) w.Header().Add("Content-Type", "application/octet-stream") - w.Write(raw) + _, err = w.Write(raw) + require.NoError(t, err) return } if r.URL.Path == "/api/2.1/clusters/get" { @@ -425,7 +426,7 @@ func TestUpgraderWorksForReleases(t *testing.T) { ctx = env.Set(ctx, "DATABRICKS_CLUSTER_ID", "installer-cluster") ctx = env.Set(ctx, "DATABRICKS_WAREHOUSE_ID", "installer-warehouse") - r := internal.NewCobraTestRunnerWithContext(t, ctx, "labs", "upgrade", "blueprint") + r := testcli.NewRunner(t, ctx, "labs", "upgrade", "blueprint") r.RunAndExpectOutput("setting up important infrastructure") // Check if the stub was called with the 'python -m pip install' command diff --git a/cmd/root/auth.go b/cmd/root/auth.go index 107679105..07ab48399 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -15,9 +15,11 @@ import ( ) // Placeholders to use as unique keys in context.Context. 
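Note on the `installer_test.go` hunks above: `panic(err)` inside the `httptest` handlers becomes `require.NoError(t, err)`, and every `w.Write` result is now checked. A standalone sketch of the shape (one caveat not raised in the diff: `require` stops tests via `t.FailNow`, which the testing package only guarantees from the test goroutine, so `assert` is the conservative choice inside handlers):

```go
package example_test

import (
	"io"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/stretchr/testify/require"
)

func TestServerWritesAreChecked(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Previously: panic(err) on failure; now the error is surfaced
		// through the test instead of crashing the handler.
		_, err := w.Write([]byte(`[{"tag_name": "v1.2.3"}]`))
		require.NoError(t, err)
	}))
	defer server.Close()

	resp, err := http.Get(server.URL)
	require.NoError(t, err)
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	require.NoError(t, err)
	require.Contains(t, string(body), "v1.2.3")
}
```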
-var workspaceClient int -var accountClient int -var configUsed int +var ( + workspaceClient int + accountClient int + configUsed int +) type ErrNoWorkspaceProfiles struct { path string diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index 9ba2a8fa9..784598796 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -15,7 +15,8 @@ import ( ) func TestEmptyHttpRequest(t *testing.T) { - ctx, _ := context.WithCancel(context.Background()) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() req := emptyHttpRequest(ctx) assert.Equal(t, req.Context(), ctx) } @@ -83,7 +84,7 @@ func TestAccountClientOrPrompt(t *testing.T) { account_id = 1112 token = foobar `), - 0755) + 0o755) require.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", configFile) t.Setenv("PATH", "/nothing") @@ -149,7 +150,7 @@ func TestWorkspaceClientOrPrompt(t *testing.T) { host = https://adb-1112.12.azuredatabricks.net/ token = foobar `), - 0755) + 0o755) require.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", configFile) t.Setenv("PATH", "/nothing") @@ -203,7 +204,7 @@ func TestMustAccountClientWorksWithDatabricksCfg(t *testing.T) { account_id = 1111 token = foobar `), - 0755) + 0o755) require.NoError(t, err) cmd := New(context.Background()) @@ -250,7 +251,7 @@ func TestMustAnyClientCanCreateWorkspaceClient(t *testing.T) { host = https://adb-1111.11.azuredatabricks.net/ token = foobar `), - 0755) + 0o755) require.NoError(t, err) ctx, tt := cmdio.SetupTest(context.Background()) @@ -279,7 +280,7 @@ func TestMustAnyClientCanCreateAccountClient(t *testing.T) { account_id = 1111 token = foobar `), - 0755) + 0o755) require.NoError(t, err) ctx, tt := cmdio.SetupTest(context.Background()) @@ -303,7 +304,7 @@ func TestMustAnyClientWithEmptyDatabricksCfg(t *testing.T) { err := os.WriteFile( configFile, []byte(""), // empty file - 0755) + 0o755) require.NoError(t, err) ctx, tt := cmdio.SetupTest(context.Background()) diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 301884287..1998b19e6 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -23,7 +23,7 @@ func setupDatabricksCfg(t *testing.T) { } cfg := []byte("[PROFILE-1]\nhost = https://a.com\ntoken = a\n[PROFILE-2]\nhost = https://a.com\ntoken = b\n") - err := os.WriteFile(filepath.Join(tempHomeDir, ".databrickscfg"), cfg, 0644) + err := os.WriteFile(filepath.Join(tempHomeDir, ".databrickscfg"), cfg, 0o644) assert.NoError(t, err) t.Setenv("DATABRICKS_CONFIG_FILE", "") @@ -48,7 +48,7 @@ func setupWithHost(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle workspace: host: %q `, host) - err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0644) + err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0o644) require.NoError(t, err) b, diags := MustConfigureBundle(cmd) @@ -66,7 +66,7 @@ func setupWithProfile(t *testing.T, cmd *cobra.Command, profile string) *bundle. 
workspace: profile: %q `, profile) - err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0644) + err := os.WriteFile(filepath.Join(rootPath, "databricks.yml"), []byte(contents), 0o644) require.NoError(t, err) b, diags := MustConfigureBundle(cmd) @@ -99,10 +99,11 @@ func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { testutil.CleanupEnvironment(t) cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("NOEXIST") + err := cmd.Flag("profile").Value.Set("NOEXIST") + require.NoError(t, err) b := setupWithHost(t, cmd, "https://x.com") - _, err := b.InitializeWorkspaceClient() + _, err = b.InitializeWorkspaceClient() assert.ErrorContains(t, err, "has no NOEXIST profile configured") } @@ -110,10 +111,11 @@ func TestBundleConfigureWithMismatchedProfile(t *testing.T) { testutil.CleanupEnvironment(t) cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("PROFILE-1") + err := cmd.Flag("profile").Value.Set("PROFILE-1") + require.NoError(t, err) b := setupWithHost(t, cmd, "https://x.com") - _, err := b.InitializeWorkspaceClient() + _, err = b.InitializeWorkspaceClient() assert.ErrorContains(t, err, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com") } @@ -121,7 +123,8 @@ func TestBundleConfigureWithCorrectProfile(t *testing.T) { testutil.CleanupEnvironment(t) cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("PROFILE-1") + err := cmd.Flag("profile").Value.Set("PROFILE-1") + require.NoError(t, err) b := setupWithHost(t, cmd, "https://a.com") client, err := b.InitializeWorkspaceClient() @@ -146,7 +149,8 @@ func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("PROFILE-1") + err := cmd.Flag("profile").Value.Set("PROFILE-1") + require.NoError(t, err) b := setupWithHost(t, cmd, "https://a.com") client, err := b.InitializeWorkspaceClient() @@ -174,7 +178,8 @@ func TestBundleConfigureProfileFlag(t *testing.T) { // The --profile flag takes precedence over the profile in the databricks.yml file cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("PROFILE-2") + err := cmd.Flag("profile").Value.Set("PROFILE-2") + require.NoError(t, err) b := setupWithProfile(t, cmd, "PROFILE-1") client, err := b.InitializeWorkspaceClient() @@ -205,7 +210,8 @@ func TestBundleConfigureProfileFlagAndEnvVariable(t *testing.T) { // The --profile flag takes precedence over the DATABRICKS_CONFIG_PROFILE environment variable t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") cmd := emptyCommand(t) - cmd.Flag("profile").Value.Set("PROFILE-2") + err := cmd.Flag("profile").Value.Set("PROFILE-2") + require.NoError(t, err) b := setupWithProfile(t, cmd, "PROFILE-1") client, err := b.InitializeWorkspaceClient() diff --git a/cmd/root/io.go b/cmd/root/io.go index b224bbb27..bba989a79 100644 --- a/cmd/root/io.go +++ b/cmd/root/io.go @@ -21,7 +21,7 @@ func initOutputFlag(cmd *cobra.Command) *outputFlag { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
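Note: the `TestEmptyHttpRequest` fix above keeps and defers the `cancel` function returned by `context.WithCancel`. Discarding it is what govet's `lostcancel` analyzer (part of the stricter govet settings this PR enables) reports, since the derived context's resources are otherwise held until the parent ends. A minimal sketch:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

func main() {
	// Writing `ctx, _ := context.WithCancel(...)` is what lostcancel
	// flags: the derived context would stay live until the parent is
	// done. Keeping cancel and deferring it releases it promptly.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	select {
	case <-ctx.Done():
		fmt.Println("cancelled:", ctx.Err())
	case <-time.After(10 * time.Millisecond):
		fmt.Println("work finished before cancellation")
	}
}
```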
if v, ok := env.Lookup(cmd.Context(), envOutputFormat); ok { - f.output.Set(v) + f.output.Set(v) //nolint:errcheck } cmd.PersistentFlags().VarP(&f.output, "output", "o", "output type: text or json") @@ -45,8 +45,9 @@ func (f *outputFlag) initializeIO(cmd *cobra.Command) error { headerTemplate = cmd.Annotations["headerTemplate"] } - cmdIO := cmdio.NewIO(f.output, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), headerTemplate, template) - ctx := cmdio.InContext(cmd.Context(), cmdIO) + ctx := cmd.Context() + cmdIO := cmdio.NewIO(ctx, f.output, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), headerTemplate, template) + ctx = cmdio.InContext(ctx, cmdIO) cmd.SetContext(ctx) return nil } diff --git a/cmd/root/logger.go b/cmd/root/logger.go index 48cb99a37..38e09b9c9 100644 --- a/cmd/root/logger.go +++ b/cmd/root/logger.go @@ -45,7 +45,10 @@ func (f *logFlags) makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error func (f *logFlags) initializeContext(ctx context.Context) (context.Context, error) { if f.debug { - f.level.Set("debug") + err := f.level.Set("debug") + if err != nil { + return nil, err + } } opts := slog.HandlerOptions{} @@ -81,13 +84,13 @@ func initLogFlags(cmd *cobra.Command) *logFlags { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. if v, ok := env.Lookup(cmd.Context(), envLogFile); ok { - f.file.Set(v) + f.file.Set(v) //nolint:errcheck } if v, ok := env.Lookup(cmd.Context(), envLogLevel); ok { - f.level.Set(v) + f.level.Set(v) //nolint:errcheck } if v, ok := env.Lookup(cmd.Context(), envLogFormat); ok { - f.output.Set(v) + f.output.Set(v) //nolint:errcheck } flags := cmd.PersistentFlags() diff --git a/cmd/root/progress_logger.go b/cmd/root/progress_logger.go index 7d6a1fa46..1458de13a 100644 --- a/cmd/root/progress_logger.go +++ b/cmd/root/progress_logger.go @@ -59,7 +59,7 @@ func initProgressLoggerFlag(cmd *cobra.Command, logFlags *logFlags) *progressLog // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
if v, ok := env.Lookup(cmd.Context(), envProgressFormat); ok { - f.Set(v) + _ = f.Set(v) } flags := cmd.PersistentFlags() diff --git a/cmd/root/progress_logger_test.go b/cmd/root/progress_logger_test.go index 9dceee8d5..42ba1bdc6 100644 --- a/cmd/root/progress_logger_test.go +++ b/cmd/root/progress_logger_test.go @@ -33,27 +33,27 @@ func initializeProgressLoggerTest(t *testing.T) ( func TestInitializeErrorOnIncompatibleConfig(t *testing.T) { plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) - logLevel.Set("info") - logFile.Set("stderr") - progressFormat.Set("inplace") + require.NoError(t, logLevel.Set("info")) + require.NoError(t, logFile.Set("stderr")) + require.NoError(t, progressFormat.Set("inplace")) _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.ErrorContains(t, err, "inplace progress logging cannot be used when log-file is stderr") } func TestNoErrorOnDisabledLogLevel(t *testing.T) { plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) - logLevel.Set("disabled") - logFile.Set("stderr") - progressFormat.Set("inplace") + require.NoError(t, logLevel.Set("disabled")) + require.NoError(t, logFile.Set("stderr")) + require.NoError(t, progressFormat.Set("inplace")) _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } func TestNoErrorOnNonStderrLogFile(t *testing.T) { plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) - logLevel.Set("info") - logFile.Set("stdout") - progressFormat.Set("inplace") + require.NoError(t, logLevel.Set("info")) + require.NoError(t, logFile.Set("stdout")) + require.NoError(t, progressFormat.Set("inplace")) _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } diff --git a/cmd/root/root.go b/cmd/root/root.go index e6f66f126..3b37d0176 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -4,11 +4,10 @@ import ( "context" "errors" "fmt" + "log/slog" "os" "strings" - "log/slog" - "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/dbr" diff --git a/cmd/root/user_agent_upstream.go b/cmd/root/user_agent_upstream.go index f580b4263..a813e8ee7 100644 --- a/cmd/root/user_agent_upstream.go +++ b/cmd/root/user_agent_upstream.go @@ -8,12 +8,16 @@ import ( ) // Environment variables that caller can set to convey what is upstream to this CLI. -const upstreamEnvVar = "DATABRICKS_CLI_UPSTREAM" -const upstreamVersionEnvVar = "DATABRICKS_CLI_UPSTREAM_VERSION" +const ( + upstreamEnvVar = "DATABRICKS_CLI_UPSTREAM" + upstreamVersionEnvVar = "DATABRICKS_CLI_UPSTREAM_VERSION" +) // Keys in the user agent. 
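Three idioms for deliberately ignoring an error appear in the flag-initialization hunks above: a trailing `//nolint:errcheck` directive (`io.go`, `logger.go`), an explicit `_ =` assignment (`progress_logger.go`), and exclusion lists in `.golangci.yaml` for calls like `MarkHidden`. A small sketch of the first two (the directive only has effect under golangci-lint; the `level` type is illustrative):

```go
package main

import (
	"fmt"
	"strconv"
)

type level struct{ v int }

// Set implements a pflag.Value-style setter that can fail.
func (l *level) Set(s string) error {
	n, err := strconv.Atoi(s)
	if err != nil {
		return err
	}
	l.v = n
	return nil
}

func main() {
	var l level
	// Invalid environment-provided defaults are ignored on purpose; the
	// directive tells errcheck the omission is deliberate.
	l.Set("not-a-number") //nolint:errcheck

	// Equivalent intent, expressed with the blank identifier instead.
	_ = l.Set("42")

	fmt.Println("level:", l.v)
}
```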
-const upstreamKey = "upstream" -const upstreamVersionKey = "upstream-version" +const ( + upstreamKey = "upstream" + upstreamVersionKey = "upstream-version" +) func withUpstreamInUserAgent(ctx context.Context) context.Context { value := env.Get(ctx, upstreamEnvVar) diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index 6d722fb08..cd2167a19 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -68,7 +68,6 @@ func (f *syncFlags) syncOptionsFromArgs(cmd *cobra.Command, args []string) (*syn localRoot := vfs.MustNew(args[0]) info, err := git.FetchRepositoryInfo(ctx, localRoot.Native(), client) - if err != nil { log.Warnf(ctx, "Failed to read git info: %s", err) } diff --git a/cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go b/cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go index b1adf6103..3f905e521 100755 --- a/cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go +++ b/cmd/workspace/aibi-dashboard-embedding-access-policy/aibi-dashboard-embedding-access-policy.go @@ -26,6 +26,7 @@ func New() *cobra.Command { } // Add methods + cmd.AddCommand(newDelete()) cmd.AddCommand(newGet()) cmd.AddCommand(newUpdate()) @@ -37,6 +38,62 @@ func New() *cobra.Command { return cmd } +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteAibiDashboardEmbeddingAccessPolicySettingRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteAibiDashboardEmbeddingAccessPolicySettingRequest + + // TODO: short flags + + cmd.Flags().StringVar(&deleteReq.Etag, "etag", deleteReq.Etag, `etag used for versioning.`) + + cmd.Use = "delete" + cmd.Short = `Delete the AI/BI dashboard embedding access policy.` + cmd.Long = `Delete the AI/BI dashboard embedding access policy. + + Delete the AI/BI dashboard embedding access policy, reverting back to the + default.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + response, err := w.Settings.AibiDashboardEmbeddingAccessPolicy().Delete(ctx, deleteReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + // start get command // Slice with functions to override default command behavior. 
diff --git a/cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go b/cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go index 481197460..69db66504 100755 --- a/cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go +++ b/cmd/workspace/aibi-dashboard-embedding-approved-domains/aibi-dashboard-embedding-approved-domains.go @@ -26,6 +26,7 @@ func New() *cobra.Command { } // Add methods + cmd.AddCommand(newDelete()) cmd.AddCommand(newGet()) cmd.AddCommand(newUpdate()) @@ -37,6 +38,62 @@ func New() *cobra.Command { return cmd } +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteAibiDashboardEmbeddingApprovedDomainsSettingRequest + + // TODO: short flags + + cmd.Flags().StringVar(&deleteReq.Etag, "etag", deleteReq.Etag, `etag used for versioning.`) + + cmd.Use = "delete" + cmd.Short = `Delete AI/BI dashboard embedding approved domains.` + cmd.Long = `Delete AI/BI dashboard embedding approved domains. + + Delete the list of domains approved to host embedded AI/BI dashboards, + reverting back to the default empty list.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + response, err := w.Settings.AibiDashboardEmbeddingApprovedDomains().Delete(ctx, deleteReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + // start get command // Slice with functions to override default command behavior. diff --git a/cmd/workspace/clean-room-assets/clean-room-assets.go b/cmd/workspace/clean-room-assets/clean-room-assets.go new file mode 100755 index 000000000..872f0ecef --- /dev/null +++ b/cmd/workspace/clean-room-assets/clean-room-assets.go @@ -0,0 +1,419 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package clean_room_assets + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/cleanrooms" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
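The generated files repeat the note that overrides "can be added from the `init()` function in manually curated files in this directory." For readers unfamiliar with that convention, a hypothetical `override.go` placed next to a generated command file could look like the sketch below; the override-slice names come straight from the generated code, while the customizations themselves are invented for illustration:

```go
package clean_room_assets

// override.go: hand-written, never touched by the generator.

import (
	"github.com/databricks/databricks-sdk-go/service/cleanrooms"
	"github.com/spf13/cobra"
)

func init() {
	// Tweak the service-level command without editing generated code.
	cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) {
		cmd.Aliases = append(cmd.Aliases, "cra") // illustrative alias
	})

	// Subcommands are adjusted the same way; each override also receives
	// a pointer to that command's request struct.
	listOverrides = append(listOverrides, func(cmd *cobra.Command, _ *cleanrooms.ListCleanRoomAssetsRequest) {
		cmd.Short = `List assets shared into a clean room.`
	})
}
```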
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean-room-assets", + Short: `Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room.`, + Long: `Clean room assets are data and code objects — Tables, volumes, and notebooks + that are shared with the clean room.`, + GroupID: "cleanrooms", + Annotations: map[string]string{ + "package": "cleanrooms", + }, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *cleanrooms.CreateCleanRoomAssetRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq cleanrooms.CreateCleanRoomAssetRequest + createReq.Asset = &cleanrooms.CleanRoomAsset{} + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().Var(&createReq.Asset.AssetType, "asset-type", `The type of the asset. Supported values: [FOREIGN_TABLE, NOTEBOOK_FILE, TABLE, VIEW, VOLUME]`) + // TODO: complex arg: foreign_table + // TODO: complex arg: foreign_table_local_details + cmd.Flags().StringVar(&createReq.Asset.Name, "name", createReq.Asset.Name, `A fully qualified name that uniquely identifies the asset within the clean room.`) + // TODO: complex arg: notebook + // TODO: complex arg: table + // TODO: complex arg: table_local_details + // TODO: complex arg: view + // TODO: complex arg: view_local_details + // TODO: complex arg: volume_local_details + + cmd.Use = "create CLEAN_ROOM_NAME" + cmd.Short = `Create an asset.` + cmd.Long = `Create an asset. + + Create a clean room asset —share an asset like a notebook or table into the + clean room. For each UC asset that is added through this method, the clean + room owner must also have enough privilege on the asset to consume it. The + privilege must be maintained indefinitely for the clean room to be able to + access the asset. Typically, you should use a group as the clean room owner. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq.Asset) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + createReq.CleanRoomName = args[0] + + response, err := w.CleanRoomAssets.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *cleanrooms.DeleteCleanRoomAssetRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq cleanrooms.DeleteCleanRoomAssetRequest + + // TODO: short flags + + cmd.Use = "delete CLEAN_ROOM_NAME ASSET_TYPE ASSET_FULL_NAME" + cmd.Short = `Delete an asset.` + cmd.Long = `Delete an asset. + + Delete a clean room asset - unshare/remove the asset from the clean room + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room. + ASSET_TYPE: The type of the asset. + ASSET_FULL_NAME: The fully qualified name of the asset, it is same as the name field in + CleanRoomAsset.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.CleanRoomName = args[0] + _, err = fmt.Sscan(args[1], &deleteReq.AssetType) + if err != nil { + return fmt.Errorf("invalid ASSET_TYPE: %s", args[1]) + } + deleteReq.AssetFullName = args[2] + + err = w.CleanRoomAssets.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *cleanrooms.GetCleanRoomAssetRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq cleanrooms.GetCleanRoomAssetRequest + + // TODO: short flags + + cmd.Use = "get CLEAN_ROOM_NAME ASSET_TYPE ASSET_FULL_NAME" + cmd.Short = `Get an asset.` + cmd.Long = `Get an asset. + + Get the details of a clean room asset by its type and full name. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room. + ASSET_TYPE: The type of the asset. + ASSET_FULL_NAME: The fully qualified name of the asset, it is same as the name field in + CleanRoomAsset.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.CleanRoomName = args[0] + _, err = fmt.Sscan(args[1], &getReq.AssetType) + if err != nil { + return fmt.Errorf("invalid ASSET_TYPE: %s", args[1]) + } + getReq.AssetFullName = args[2] + + response, err := w.CleanRoomAssets.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
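The `delete` and `get` commands above parse the positional `ASSET_TYPE` argument with `fmt.Sscan` into the SDK's enum type. That works because `fmt` scanning falls back to reflection for pointers whose element has a basic kind, so a defined string type can be filled from a single token. A standalone illustration (`AssetType` here is a stand-in, not the SDK type):

```go
package main

import "fmt"

// AssetType stands in for an SDK enum whose underlying type is string.
type AssetType string

func main() {
	var at AssetType
	// Sscan fills pointer targets with basic underlying kinds, so a
	// defined string type parses from one whitespace-delimited token.
	if _, err := fmt.Sscan("NOTEBOOK_FILE", &at); err != nil {
		fmt.Println("invalid ASSET_TYPE:", err)
		return
	}
	fmt.Println("parsed:", at)
}
```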
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *cleanrooms.ListCleanRoomAssetsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq cleanrooms.ListCleanRoomAssetsRequest + + // TODO: short flags + + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) + + cmd.Use = "list CLEAN_ROOM_NAME" + cmd.Short = `List assets.` + cmd.Long = `List assets. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + listReq.CleanRoomName = args[0] + + response := w.CleanRoomAssets.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *cleanrooms.UpdateCleanRoomAssetRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq cleanrooms.UpdateCleanRoomAssetRequest + updateReq.Asset = &cleanrooms.CleanRoomAsset{} + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().Var(&updateReq.Asset.AssetType, "asset-type", `The type of the asset. Supported values: [FOREIGN_TABLE, NOTEBOOK_FILE, TABLE, VIEW, VOLUME]`) + // TODO: complex arg: foreign_table + // TODO: complex arg: foreign_table_local_details + cmd.Flags().StringVar(&updateReq.Asset.Name, "name", updateReq.Asset.Name, `A fully qualified name that uniquely identifies the asset within the clean room.`) + // TODO: complex arg: notebook + // TODO: complex arg: table + // TODO: complex arg: table_local_details + // TODO: complex arg: view + // TODO: complex arg: view_local_details + // TODO: complex arg: volume_local_details + + cmd.Use = "update CLEAN_ROOM_NAME ASSET_TYPE NAME" + cmd.Short = `Update an asset.` + cmd.Long = `Update an asset. + + Update a clean room asset. For example, updating the content of a notebook; + changing the shared partitions of a table; etc. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room. + ASSET_TYPE: The type of the asset. + NAME: A fully qualified name that uniquely identifies the asset within the clean + room. This is also the name displayed in the clean room UI. 
+ + For UC securable assets (tables, volumes, etc.), the format is + *shared_catalog*.*shared_schema*.*asset_name* + + For notebooks, the name is the notebook file name.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(3) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq.Asset) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.CleanRoomName = args[0] + _, err = fmt.Sscan(args[1], &updateReq.AssetType) + if err != nil { + return fmt.Errorf("invalid ASSET_TYPE: %s", args[1]) + } + updateReq.Name = args[2] + + response, err := w.CleanRoomAssets.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service CleanRoomAssets diff --git a/cmd/workspace/clean-room-task-runs/clean-room-task-runs.go b/cmd/workspace/clean-room-task-runs/clean-room-task-runs.go new file mode 100755 index 000000000..b41e380cc --- /dev/null +++ b/cmd/workspace/clean-room-task-runs/clean-room-task-runs.go @@ -0,0 +1,97 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package clean_room_task_runs + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/cleanrooms" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean-room-task-runs", + Short: `Clean room task runs are the executions of notebooks in a clean room.`, + Long: `Clean room task runs are the executions of notebooks in a clean room.`, + GroupID: "cleanrooms", + Annotations: map[string]string{ + "package": "cleanrooms", + }, + } + + // Add methods + cmd.AddCommand(newList()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
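Each create/update command above accepts `--json` with "either inline JSON string or @path/to/file.json". The flag type (`flags.JsonFlag`) is internal to this repo, but the general shape is easy to sketch: read inline JSON or an @-prefixed file path, then unmarshal over the request struct. This is a simplified sketch of the pattern, not the CLI's actual implementation:

```go
package main

import (
	"encoding/json"
	"fmt"
	"os"
	"strings"
)

type createRequest struct {
	Name    string `json:"name"`
	Comment string `json:"comment"`
}

// unmarshalJSONFlag mimics the spirit of the --json flag: the value is
// either an inline JSON document or "@path/to/file.json".
func unmarshalJSONFlag(value string, target any) error {
	raw := []byte(value)
	if strings.HasPrefix(value, "@") {
		var err error
		raw, err = os.ReadFile(value[1:])
		if err != nil {
			return err
		}
	}
	return json.Unmarshal(raw, target)
}

func main() {
	var req createRequest
	if err := unmarshalJSONFlag(`{"name": "demo", "comment": "hi"}`, &req); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Printf("%+v\n", req)
}
```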
+var listOverrides []func( + *cobra.Command, + *cleanrooms.ListCleanRoomNotebookTaskRunsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq cleanrooms.ListCleanRoomNotebookTaskRunsRequest + + // TODO: short flags + + cmd.Flags().StringVar(&listReq.NotebookName, "notebook-name", listReq.NotebookName, `Notebook name.`) + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `The maximum number of task runs to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) + + cmd.Use = "list CLEAN_ROOM_NAME" + cmd.Short = `List notebook task runs.` + cmd.Long = `List notebook task runs. + + List all the historical notebook task runs in a clean room. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + listReq.CleanRoomName = args[0] + + response := w.CleanRoomTaskRuns.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// end service CleanRoomTaskRuns diff --git a/cmd/workspace/clean-rooms/clean-rooms.go b/cmd/workspace/clean-rooms/clean-rooms.go new file mode 100755 index 000000000..053e41e8a --- /dev/null +++ b/cmd/workspace/clean-rooms/clean-rooms.go @@ -0,0 +1,450 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package clean_rooms + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/cleanrooms" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean-rooms", + Short: `A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data.`, + Long: `A clean room uses Delta Sharing and serverless compute to provide a secure and + privacy-protecting environment where multiple parties can work together on + sensitive enterprise data without direct access to each other’s data.`, + GroupID: "cleanrooms", + Annotations: map[string]string{ + "package": "cleanrooms", + }, + } + + // Add methods + cmd.AddCommand(newCreate()) + cmd.AddCommand(newCreateOutputCatalog()) + cmd.AddCommand(newDelete()) + cmd.AddCommand(newGet()) + cmd.AddCommand(newList()) + cmd.AddCommand(newUpdate()) + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. 
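The `list` commands in this PR expose `--page-size` and `--page-token` flags and hand the SDK's iterator to `cmdio.RenderIterator`. The underlying protocol is plain token pagination; a generic sketch of consuming such an API without the SDK (`listPage` is invented for illustration, with an empty token marking the last page):

```go
package main

import "fmt"

type page struct {
	Items         []string
	NextPageToken string
}

// listPage is a hypothetical one-page fetch; an empty NextPageToken
// signals the last page, mirroring the page-token flags above.
func listPage(token string, _ int) page {
	if token == "" {
		return page{Items: []string{"run-1", "run-2"}, NextPageToken: "p2"}
	}
	return page{Items: []string{"run-3"}}
}

func main() {
	for token := ""; ; {
		p := listPage(token, 2)
		for _, item := range p.Items {
			fmt.Println(item)
		}
		if p.NextPageToken == "" {
			break
		}
		token = p.NextPageToken
	}
}
```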
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *cleanrooms.CreateCleanRoomRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq cleanrooms.CreateCleanRoomRequest + createReq.CleanRoom = &cleanrooms.CleanRoom{} + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.CleanRoom.Comment, "comment", createReq.CleanRoom.Comment, ``) + cmd.Flags().StringVar(&createReq.CleanRoom.Name, "name", createReq.CleanRoom.Name, `The name of the clean room.`) + // TODO: complex arg: output_catalog + cmd.Flags().StringVar(&createReq.CleanRoom.Owner, "owner", createReq.CleanRoom.Owner, `This is Databricks username of the owner of the local clean room securable for permission management.`) + // TODO: complex arg: remote_detailed_info + + cmd.Use = "create" + cmd.Short = `Create a clean room.` + cmd.Long = `Create a clean room. + + Create a new clean room with the specified collaborators. This method is + asynchronous; the returned name field inside the clean_room field can be used + to poll the clean room status, using the :method:cleanrooms/get method. When + this method returns, the cluster will be in a PROVISIONING state. The cluster + will be usable once it enters an ACTIVE state. + + The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** + privilege on the metastore.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createJson.Unmarshal(&createReq.CleanRoom) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + + response, err := w.CleanRooms.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +// start create-output-catalog command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOutputCatalogOverrides []func( + *cobra.Command, + *cleanrooms.CreateCleanRoomOutputCatalogRequest, +) + +func newCreateOutputCatalog() *cobra.Command { + cmd := &cobra.Command{} + + var createOutputCatalogReq cleanrooms.CreateCleanRoomOutputCatalogRequest + createOutputCatalogReq.OutputCatalog = &cleanrooms.CleanRoomOutputCatalog{} + var createOutputCatalogJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createOutputCatalogJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createOutputCatalogReq.OutputCatalog.CatalogName, "catalog-name", createOutputCatalogReq.OutputCatalog.CatalogName, `The name of the output catalog in UC.`) + + cmd.Use = "create-output-catalog CLEAN_ROOM_NAME" + cmd.Short = `Create an output catalog.` + cmd.Long = `Create an output catalog. + + Create the output catalog of the clean room. + + Arguments: + CLEAN_ROOM_NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := createOutputCatalogJson.Unmarshal(&createOutputCatalogReq.OutputCatalog) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + createOutputCatalogReq.CleanRoomName = args[0] + + response, err := w.CleanRooms.CreateOutputCatalog(ctx, createOutputCatalogReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOutputCatalogOverrides { + fn(cmd, &createOutputCatalogReq) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *cleanrooms.DeleteCleanRoomRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq cleanrooms.DeleteCleanRoomRequest + + // TODO: short flags + + cmd.Use = "delete NAME" + cmd.Short = `Delete a clean room.` + cmd.Long = `Delete a clean room. + + Delete a clean room. After deletion, the clean room will be removed from the + metastore. If the other collaborators have not deleted the clean room, they + will still have the clean room in their metastore, but it will be in a DELETED + state and no operations other than deletion can be performed on it. + + Arguments: + NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.Name = args[0] + + err = w.CleanRooms.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. 
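Every generated command wires `cmd.Args` through `root.ExactArgs(n)`, a repo-internal wrapper whose diagnostics are not shown in this diff. Stock cobra offers the same contract, which is enough to sketch the shape:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	cmd := &cobra.Command{
		Use: "delete NAME",
		// Reject anything other than exactly one positional argument
		// before RunE executes, as the generated commands do.
		Args: cobra.ExactArgs(1),
		RunE: func(cmd *cobra.Command, args []string) error {
			fmt.Println("deleting:", args[0])
			return nil
		},
	}

	cmd.SetArgs([]string{"my-clean-room"})
	if err := cmd.Execute(); err != nil {
		fmt.Println(err)
	}
}
```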
+ // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *cleanrooms.GetCleanRoomRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq cleanrooms.GetCleanRoomRequest + + // TODO: short flags + + cmd.Use = "get NAME" + cmd.Short = `Get a clean room.` + cmd.Long = `Get a clean room. + + Get the details of a clean room given its name.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.Name = args[0] + + response, err := w.CleanRooms.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *cleanrooms.ListCleanRoomsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq cleanrooms.ListCleanRoomsRequest + + // TODO: short flags + + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Maximum number of clean rooms to return (i.e., the page length).`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque pagination token to go to next page based on previous query.`) + + cmd.Use = "list" + cmd.Short = `List clean rooms.` + cmd.Long = `List clean rooms. + + Get a list of all clean rooms of the metastore. Only clean rooms the caller + has access to are returned.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(0) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + response := w.CleanRooms.List(ctx, listReq) + return cmdio.RenderIterator(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *cleanrooms.UpdateCleanRoomRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq cleanrooms.UpdateCleanRoomRequest + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: complex arg: clean_room + + cmd.Use = "update NAME" + cmd.Short = `Update a clean room.` + cmd.Long = `Update a clean room. + + Update a clean room. The caller must be the owner of the clean room, have + **MODIFY_CLEAN_ROOM** privilege, or be metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. + + Arguments: + NAME: Name of the clean room.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := root.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + diags := updateJson.Unmarshal(&updateReq) + if diags.HasError() { + return diags.Error() + } + if len(diags) > 0 { + err := cmdio.RenderDiagnosticsToErrorOut(ctx, diags) + if err != nil { + return err + } + } + } + updateReq.Name = args[0] + + response, err := w.CleanRooms.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +// end service CleanRooms diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 9cb3cca9e..f07d0cf76 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -8,6 +8,9 @@ import ( apps "github.com/databricks/cli/cmd/workspace/apps" artifact_allowlists "github.com/databricks/cli/cmd/workspace/artifact-allowlists" catalogs "github.com/databricks/cli/cmd/workspace/catalogs" + clean_room_assets "github.com/databricks/cli/cmd/workspace/clean-room-assets" + clean_room_task_runs "github.com/databricks/cli/cmd/workspace/clean-room-task-runs" + clean_rooms "github.com/databricks/cli/cmd/workspace/clean-rooms" cluster_policies "github.com/databricks/cli/cmd/workspace/cluster-policies" clusters "github.com/databricks/cli/cmd/workspace/clusters" connections "github.com/databricks/cli/cmd/workspace/connections" @@ -98,6 +101,9 @@ func All() []*cobra.Command { out = append(out, apps.New()) out = append(out, artifact_allowlists.New()) out = append(out, catalogs.New()) + out = append(out, clean_room_assets.New()) + out = append(out, clean_room_task_runs.New()) + out = append(out, clean_rooms.New()) out = append(out, cluster_policies.New()) out = append(out, clusters.New()) out = append(out, connections.New()) diff --git a/cmd/workspace/credentials/credentials.go b/cmd/workspace/credentials/credentials.go index 44ee0cf31..672a3aeec 100755 --- a/cmd/workspace/credentials/credentials.go +++ b/cmd/workspace/credentials/credentials.go @@ -27,7 +27,7 @@ func New() *cobra.Command { To create credentials, you must be a Databricks account admin or have the CREATE SERVICE CREDENTIAL privilege. 
The user who creates the credential can - delegate ownership to another user or group to manage permissions on it`, + delegate ownership to another user or group to manage permissions on it.`, GroupID: "catalog", Annotations: map[string]string{ "package": "catalog", @@ -73,7 +73,7 @@ func newCreateCredential() *cobra.Command { // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal cmd.Flags().StringVar(&createCredentialReq.Comment, "comment", createCredentialReq.Comment, `Comment associated with the credential.`) - // TODO: complex arg: gcp_service_account_key + // TODO: complex arg: databricks_gcp_service_account cmd.Flags().Var(&createCredentialReq.Purpose, "purpose", `Indicates the purpose of the credential. Supported values: [SERVICE, STORAGE]`) cmd.Flags().BoolVar(&createCredentialReq.ReadOnly, "read-only", createCredentialReq.ReadOnly, `Whether the credential is usable only for read operations.`) cmd.Flags().BoolVar(&createCredentialReq.SkipValidation, "skip-validation", createCredentialReq.SkipValidation, `Optional.`) @@ -227,6 +227,7 @@ func newGenerateTemporaryServiceCredential() *cobra.Command { cmd.Flags().Var(&generateTemporaryServiceCredentialJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: azure_options + // TODO: complex arg: gcp_options cmd.Use = "generate-temporary-service-credential CREDENTIAL_NAME" cmd.Short = `Generate a temporary service credential.` @@ -434,6 +435,7 @@ func newUpdateCredential() *cobra.Command { // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal cmd.Flags().StringVar(&updateCredentialReq.Comment, "comment", updateCredentialReq.Comment, `Comment associated with the credential.`) + // TODO: complex arg: databricks_gcp_service_account cmd.Flags().BoolVar(&updateCredentialReq.Force, "force", updateCredentialReq.Force, `Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent external locations and external tables (when purpose is **STORAGE**).`) cmd.Flags().Var(&updateCredentialReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces. Supported values: [ISOLATION_MODE_ISOLATED, ISOLATION_MODE_OPEN]`) cmd.Flags().StringVar(&updateCredentialReq.NewName, "new-name", updateCredentialReq.NewName, `New name of credential.`) diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index 98e474d33..8827682fa 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -72,5 +72,9 @@ func Groups() []cobra.Group { ID: "apps", Title: "Apps", }, + { + ID: "cleanrooms", + Title: "Clean Rooms", + }, } } diff --git a/cmd/workspace/lakeview/lakeview.go b/cmd/workspace/lakeview/lakeview.go index 35c3bdf4e..f19038062 100755 --- a/cmd/workspace/lakeview/lakeview.go +++ b/cmd/workspace/lakeview/lakeview.go @@ -160,9 +160,6 @@ func newCreateSchedule() *cobra.Command { Arguments: DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -242,9 +239,6 @@ func newCreateSubscription() *cobra.Command { DASHBOARD_ID: UUID identifying the dashboard to which the subscription belongs. SCHEDULE_ID: UUID identifying the schedule to which the subscription belongs.` - // This command is being previewed; hide from help output. 
- cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -322,9 +316,6 @@ func newDeleteSchedule() *cobra.Command { DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs. SCHEDULE_ID: UUID identifying the schedule.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -384,9 +375,6 @@ func newDeleteSubscription() *cobra.Command { SCHEDULE_ID: UUID identifying the schedule which the subscription belongs. SUBSCRIPTION_ID: UUID identifying the subscription.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -562,9 +550,6 @@ func newGetSchedule() *cobra.Command { DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs. SCHEDULE_ID: UUID identifying the schedule.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -624,9 +609,6 @@ func newGetSubscription() *cobra.Command { SCHEDULE_ID: UUID identifying the schedule which the subscription belongs. SUBSCRIPTION_ID: UUID identifying the subscription.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -739,9 +721,6 @@ func newListSchedules() *cobra.Command { Arguments: DASHBOARD_ID: UUID identifying the dashboard to which the schedules belongs.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -798,9 +777,6 @@ func newListSubscriptions() *cobra.Command { DASHBOARD_ID: UUID identifying the dashboard which the subscriptions belongs. SCHEDULE_ID: UUID identifying the schedule which the subscriptions belongs.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { @@ -1215,9 +1191,6 @@ func newUpdateSchedule() *cobra.Command { DASHBOARD_ID: UUID identifying the dashboard to which the schedule belongs. SCHEDULE_ID: UUID identifying the schedule.` - // This command is being previewed; hide from help output. - cmd.Hidden = true - cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/workspace/workspace/export_dir.go b/cmd/workspace/workspace/export_dir.go index 0046f46ef..febe4c3e1 100644 --- a/cmd/workspace/workspace/export_dir.go +++ b/cmd/workspace/workspace/export_dir.go @@ -39,7 +39,7 @@ func (opts exportDirOptions) callback(ctx context.Context, workspaceFiler filer. 
// create directory and return early if d.IsDir() { - return os.MkdirAll(targetPath, 0755) + return os.MkdirAll(targetPath, 0o755) } // Add extension to local file path if the file is a notebook diff --git a/cmd/workspace/workspace/overrides.go b/cmd/workspace/workspace/overrides.go index cfed0a6ee..216e9b5d8 100644 --- a/cmd/workspace/workspace/overrides.go +++ b/cmd/workspace/workspace/overrides.go @@ -52,7 +52,7 @@ func exportOverride(exportCmd *cobra.Command, exportReq *workspace.ExportRequest if err != nil { return err } - return os.WriteFile(filePath, b, 0755) + return os.WriteFile(filePath, b, 0o755) } } @@ -88,7 +88,6 @@ func importOverride(importCmd *cobra.Command, importReq *workspace.Import) { err := originalRunE(cmd, args) return wrapImportAPIErrors(err, importReq) } - } func init() { diff --git a/go.mod b/go.mod index 7141ed768..c9a008fb3 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.23.2 require ( github.com/Masterminds/semver/v3 v3.3.1 // MIT github.com/briandowns/spinner v1.23.1 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.52.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.53.0 // Apache 2.0 github.com/fatih/color v1.18.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.6.0 // BSD-3-Clause @@ -26,9 +26,9 @@ require ( golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 golang.org/x/mod v0.22.0 golang.org/x/oauth2 v0.24.0 - golang.org/x/sync v0.9.0 - golang.org/x/term v0.26.0 - golang.org/x/text v0.20.0 + golang.org/x/sync v0.10.0 + golang.org/x/term v0.27.0 + golang.org/x/text v0.21.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 gopkg.in/yaml.v3 v3.0.1 ) @@ -62,9 +62,9 @@ require ( go.opentelemetry.io/otel v1.24.0 // indirect go.opentelemetry.io/otel/metric v1.24.0 // indirect go.opentelemetry.io/otel/trace v1.24.0 // indirect - golang.org/x/crypto v0.24.0 // indirect + golang.org/x/crypto v0.31.0 // indirect golang.org/x/net v0.26.0 // indirect - golang.org/x/sys v0.27.0 // indirect + golang.org/x/sys v0.28.0 // indirect golang.org/x/time v0.5.0 // indirect google.golang.org/api v0.182.0 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240521202816-d264139d666e // indirect diff --git a/go.sum b/go.sum index 5d2c53a37..63bf2be33 100644 --- a/go.sum +++ b/go.sum @@ -32,8 +32,8 @@ github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGX github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= -github.com/databricks/databricks-sdk-go v0.52.0 h1:WKcj0F+pdx0gjI5xMicjYC4O43S2q5nyTpaGGMFmgHw= -github.com/databricks/databricks-sdk-go v0.52.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= +github.com/databricks/databricks-sdk-go v0.53.0 h1:rZMXaTC3HNKZt+m4C4I/dY3EdZj+kl/sVd/Kdq55Qfo= +github.com/databricks/databricks-sdk-go v0.53.0/go.mod h1:ds+zbv5mlQG7nFEU5ojLtgN/u0/9YzZmKQES/CfedzU= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -176,8 +176,8 @@ go.opentelemetry.io/otel/trace v1.24.0 h1:CsKnnL4dUAr/0llH9FKuc698G04IrpWV0MQA/Y go.opentelemetry.io/otel/trace v1.24.0/go.mod h1:HPc3Xr/cOApsBI154IU0OI0HJexz+aw5uPdbs3UCjNU= 
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.24.0 h1:mnl8DM0o513X8fdIkmyFE/5hTYxbwYOjDS/+rK6qpRI= -golang.org/x/crypto v0.24.0/go.mod h1:Z1PMYSOR5nyMcyAVAIQSKCDwalqy85Aqn1x3Ws4L5DM= +golang.org/x/crypto v0.31.0 h1:ihbySMvVjLAeSH1IbfcRTkD/iNscyz8rGzjF/E5hV6U= +golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= @@ -200,8 +200,8 @@ golang.org/x/oauth2 v0.24.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbht golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= +golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -212,14 +212,14 @@ golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= -golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= -golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA= +golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q= +golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= -golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= +golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod 
h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/integration/README.md b/integration/README.md
new file mode 100644
index 000000000..1c1d7c6f6
--- /dev/null
+++ b/integration/README.md
@@ -0,0 +1,37 @@
+# Integration tests
+
+This directory contains integration tests for the project.
+
+The tree structure generally mirrors the source code tree structure.
+
+Requirements for new files in this directory:
+* Every package **must** be named after its directory with `_test` appended
+  * Requiring a different package name for integration tests avoids aliasing with the main package.
+* Every integration test package **must** include a `main_test.go` file.
+
+These requirements are enforced by a unit test in this directory.
+
+## Running integration tests
+
+Integration tests require the following environment variables:
+* `CLOUD_ENV` - set to the cloud environment to use (e.g. `aws`, `azure`, `gcp`)
+* `DATABRICKS_HOST` - set to the Databricks workspace to use
+* `DATABRICKS_TOKEN` - set to the Databricks token to use
+
+Optional environment variables:
+* `TEST_DEFAULT_WAREHOUSE_ID` - set to the default warehouse ID to use
+* `TEST_METASTORE_ID` - set to the metastore ID to use
+* `TEST_INSTANCE_POOL_ID` - set to the instance pool ID to use
+* `TEST_BRICKS_CLUSTER_ID` - set to the cluster ID to use
+
+To run all integration tests, use the following command:
+
+```bash
+go test ./integration/...
+```
+
+Alternatively:
+
+```bash
+make integration
+```
diff --git a/internal/dashboard_assumptions_test.go b/integration/assumptions/dashboard_assumptions_test.go
similarity index 90%
rename from internal/dashboard_assumptions_test.go
rename to integration/assumptions/dashboard_assumptions_test.go
index 64294873d..51a809498 100644
--- a/internal/dashboard_assumptions_test.go
+++ b/integration/assumptions/dashboard_assumptions_test.go
@@ -1,10 +1,11 @@
-package internal
+package assumptions_test

 import (
 	"encoding/base64"
 	"testing"

 	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/dyn/convert"
 	"github.com/databricks/cli/libs/dyn/merge"
@@ -18,16 +19,16 @@ import (

 // Verify that importing a dashboard through the Workspace API retains the identity of the underlying resource,
 // as well as properties exclusively accessible through the dashboards API.
-func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) { +func TestDashboardAssumptions_WorkspaceImport(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) t.Parallel() dashboardName := "New Dashboard" dashboardPayload := []byte(`{"pages":[{"name":"2506f97a","displayName":"New Page"}]}`) - warehouseId := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID") + warehouseId := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID") - dir := wt.TemporaryWorkspaceDir("dashboard-assumptions-") + dir := acc.TemporaryWorkspaceDir(wt, "dashboard-assumptions-") dashboard, err := wt.W.Lakeview.Create(ctx, dashboards.CreateDashboardRequest{ Dashboard: &dashboards.Dashboard{ @@ -98,7 +99,7 @@ func TestAccDashboardAssumptions_WorkspaceImport(t *testing.T) { assert.Fail(t, "unexpected insert operation") return right, nil }, - VisitUpdate: func(basePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) { + VisitUpdate: func(basePath dyn.Path, left, right dyn.Value) (dyn.Value, error) { updatedFieldPaths = append(updatedFieldPaths, basePath.String()) return right, nil }, diff --git a/integration/assumptions/main_test.go b/integration/assumptions/main_test.go new file mode 100644 index 000000000..be2761385 --- /dev/null +++ b/integration/assumptions/main_test.go @@ -0,0 +1,13 @@ +package assumptions_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/bundle/artifacts_test.go b/integration/bundle/artifacts_test.go similarity index 85% rename from internal/bundle/artifacts_test.go rename to integration/bundle/artifacts_test.go index 34d101e4f..3a5da721c 100644 --- a/internal/bundle/artifacts_test.go +++ b/integration/bundle/artifacts_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "fmt" @@ -12,8 +12,10 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/cli/bundle/libraries" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/env" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" @@ -23,21 +25,20 @@ import ( ) func touchEmptyFile(t *testing.T, path string) { - err := os.MkdirAll(filepath.Dir(path), 0700) + err := os.MkdirAll(filepath.Dir(path), 0o700) require.NoError(t, err) f, err := os.Create(path) require.NoError(t, err) f.Close() } -func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { +func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - w := wt.W dir := t.TempDir() whlPath := filepath.Join(dir, "dist", "test.whl") touchEmptyFile(t, whlPath) - wsDir := internal.TemporaryWorkspaceDir(t, w) + wsDir := acc.TemporaryWorkspaceDir(wt, "artifact-") b := &bundle.Bundle{ BundleRootPath: dir, @@ -95,14 +96,13 @@ func TestAccUploadArtifactFileToCorrectRemotePath(t *testing.T) { ) } -func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) { +func TestUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - w := wt.W dir := t.TempDir() whlPath := filepath.Join(dir, 
"dist", "test.whl") touchEmptyFile(t, whlPath) - wsDir := internal.TemporaryWorkspaceDir(t, w) + wsDir := acc.TemporaryWorkspaceDir(wt, "artifact-") b := &bundle.Bundle{ BundleRootPath: dir, @@ -160,15 +160,14 @@ func TestAccUploadArtifactFileToCorrectRemotePathWithEnvironments(t *testing.T) ) } -func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) { +func TestUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - w := wt.W if os.Getenv("TEST_METASTORE_ID") == "" { t.Skip("Skipping tests that require a UC Volume when metastore id is not set.") } - volumePath := internal.TemporaryUcVolume(t, w) + volumePath := acc.TemporaryVolume(wt) dir := t.TempDir() whlPath := filepath.Join(dir, "dist", "test.whl") @@ -230,11 +229,11 @@ func TestAccUploadArtifactFileToCorrectRemotePathForVolumes(t *testing.T) { ) } -func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) { +func TestUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W - schemaName := internal.RandomName("schema-") + schemaName := testutil.RandomName("schema-") _, err := w.Schemas.Create(ctx, catalog.CreateSchema{ CatalogName: "main", @@ -248,15 +247,14 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) { require.NoError(t, err) }) - bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{ "unique_id": uuid.New().String(), "schema_name": schemaName, "volume_name": "doesnotexist", }) - require.NoError(t, err) - t.Setenv("BUNDLE_ROOT", bundleRoot) - stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy") + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + stdout, stderr, err := testcli.RequireErrorRun(t, ctx, "bundle", "deploy") assert.Error(t, err) assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/doesnotexist does not exist: Not Found @@ -267,11 +265,11 @@ func TestAccUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) { assert.Equal(t, "", stderr.String()) } -func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) { +func TestUploadArtifactToVolumeNotYetDeployed(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W - schemaName := internal.RandomName("schema-") + schemaName := testutil.RandomName("schema-") _, err := w.Schemas.Create(ctx, catalog.CreateSchema{ CatalogName: "main", @@ -285,15 +283,14 @@ func TestAccUploadArtifactToVolumeNotYetDeployed(t *testing.T) { require.NoError(t, err) }) - bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{ "unique_id": uuid.New().String(), "schema_name": schemaName, "volume_name": "my_volume", }) - require.NoError(t, err) - t.Setenv("BUNDLE_ROOT", bundleRoot) - stdout, stderr, err := internal.RequireErrorRun(t, "bundle", "deploy") + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + stdout, stderr, err := testcli.RequireErrorRun(t, ctx, "bundle", "deploy") assert.Error(t, err) assert.Equal(t, fmt.Sprintf(`Error: volume /Volumes/main/%s/my_volume does not exist: Not Found diff --git a/integration/bundle/basic_test.go b/integration/bundle/basic_test.go new file mode 100644 index 000000000..e458706e0 --- /dev/null +++ b/integration/bundle/basic_test.go @@ -0,0 +1,37 @@ +package bundle_test + +import ( + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/internal/acc" 
+ "github.com/databricks/cli/internal/testutil" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) { + ctx, _ := acc.WorkspaceTest(t) + + nodeTypeId := testutil.GetCloud(t).NodeTypeID() + uniqueId := uuid.New().String() + root := initTestTemplate(t, ctx, "basic", map[string]any{ + "unique_id": uniqueId, + "node_type_id": nodeTypeId, + "spark_version": defaultSparkVersion, + }) + + t.Cleanup(func() { + destroyBundle(t, ctx, root) + }) + + // deploy empty bundle + deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"}) + + // Remove .databricks directory to simulate a fresh deployment + require.NoError(t, os.RemoveAll(filepath.Join(root, ".databricks"))) + + // deploy empty bundle again + deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"}) +} diff --git a/internal/bundle/bind_resource_test.go b/integration/bundle/bind_resource_test.go similarity index 66% rename from internal/bundle/bind_resource_test.go rename to integration/bundle/bind_resource_test.go index 8cc5da536..b51235770 100644 --- a/internal/bundle/bind_resource_test.go +++ b/integration/bundle/bind_resource_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "fmt" @@ -6,8 +6,10 @@ import ( "path/filepath" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/env" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/google/uuid" @@ -15,39 +17,33 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccBindJobToExistingJob(t *testing.T) { - env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) - +func TestBindJobToExistingJob(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - gt := &generateJobTest{T: t, w: wt.W} + gt := &generateJobTest{T: wt, w: wt.W} - nodeTypeId := internal.GetNodeTypeId(env) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "spark_version": "13.3.x-scala2.12", "node_type_id": nodeTypeId, }) - require.NoError(t, err) jobId := gt.createTestJob(ctx) t.Cleanup(func() { gt.destroyJob(ctx, jobId) - require.NoError(t, err) }) - t.Setenv("BUNDLE_ROOT", bundleRoot) - c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve") - _, _, err = c.Run() + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + c := testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve") + _, _, err := c.Run() require.NoError(t, err) // Remove .databricks directory to simulate a fresh deployment err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) w, err := databricks.NewWorkspaceClient() require.NoError(t, err) @@ -60,7 +56,7 @@ func TestAccBindJobToExistingJob(t *testing.T) { require.Equal(t, job.Settings.Name, fmt.Sprintf("test-job-basic-%s", uniqueId)) require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") - c = internal.NewCobraTestRunner(t, "bundle", "deployment", "unbind", "foo") + c = testcli.NewRunner(t, ctx, "bundle", 
"deployment", "unbind", "foo") _, _, err = c.Run() require.NoError(t, err) @@ -68,8 +64,7 @@ func TestAccBindJobToExistingJob(t *testing.T) { err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks")) require.NoError(t, err) - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) // Check that job is unbound and exists after bundle is destroyed job, err = w.Jobs.Get(ctx, jobs.GetJobRequest{ @@ -80,21 +75,17 @@ func TestAccBindJobToExistingJob(t *testing.T) { require.Contains(t, job.Settings.Tasks[0].SparkPythonTask.PythonFile, "hello_world.py") } -func TestAccAbortBind(t *testing.T) { - env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) - +func TestAbortBind(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - gt := &generateJobTest{T: t, w: wt.W} + gt := &generateJobTest{T: wt, w: wt.W} - nodeTypeId := internal.GetNodeTypeId(env) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "spark_version": "13.3.x-scala2.12", "node_type_id": nodeTypeId, }) - require.NoError(t, err) jobId := gt.createTestJob(ctx) t.Cleanup(func() { @@ -103,17 +94,16 @@ func TestAccAbortBind(t *testing.T) { }) // Bind should fail because prompting is not possible. - t.Setenv("BUNDLE_ROOT", bundleRoot) - t.Setenv("TERM", "dumb") - c := internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId)) + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + c := testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId)) // Expect error suggesting to use --auto-approve - _, _, err = c.Run() + _, _, err := c.Run() assert.ErrorContains(t, err, "failed to bind the resource") assert.ErrorContains(t, err, "This bind operation requires user confirmation, but the current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed")

-	err = deployBundle(t, ctx, bundleRoot)
-	require.NoError(t, err)
+	deployBundle(t, ctx, bundleRoot)

 	w, err := databricks.NewWorkspaceClient()
 	require.NoError(t, err)
@@ -128,18 +118,14 @@ func TestAccAbortBind(t *testing.T) {
 	require.Contains(t, job.Settings.Tasks[0].NotebookTask.NotebookPath, "test")
 }

-func TestAccGenerateAndBind(t *testing.T) {
-	env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV")
-	t.Log(env)
-
+func TestGenerateAndBind(t *testing.T) {
 	ctx, wt := acc.WorkspaceTest(t)
-	gt := &generateJobTest{T: t, w: wt.W}
+	gt := &generateJobTest{T: wt, w: wt.W}

 	uniqueId := uuid.New().String()
-	bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{
+	bundleRoot := initTestTemplate(t, ctx, "with_includes", map[string]any{
 		"unique_id": uniqueId,
 	})
-	require.NoError(t, err)

 	w, err := databricks.NewWorkspaceClient()
 	require.NoError(t, err)
@@ -154,8 +140,8 @@ func TestAccGenerateAndBind(t *testing.T) {
 		}
 	})

-	t.Setenv("BUNDLE_ROOT", bundleRoot)
-	c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "job",
+	ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
+	c := testcli.NewRunner(t, ctx, "bundle", "generate", "job",
 		"--key", "test_job_key",
 		"--existing-job-id", fmt.Sprint(jobId),
 		"--config-dir", filepath.Join(bundleRoot, "resources"),
@@ -171,15 +157,13 @@ func TestAccGenerateAndBind(t *testing.T) {

 	require.Len(t, matches, 1)

-	c = internal.NewCobraTestRunner(t, "bundle", "deployment", "bind", "test_job_key", fmt.Sprint(jobId), "--auto-approve")
+	c = testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "test_job_key", fmt.Sprint(jobId), "--auto-approve")
 	_, _, err = c.Run()
 	require.NoError(t, err)

-	err = deployBundle(t, ctx, bundleRoot)
-	require.NoError(t, err)
+	deployBundle(t, ctx, bundleRoot)

-	err = destroyBundle(t, ctx, bundleRoot)
-	require.NoError(t, err)
+	destroyBundle(t, ctx, bundleRoot)

 	// Check that job is bound and does not exist after bundle is destroyed
 	_, err = w.Jobs.Get(ctx, jobs.GetJobRequest{
diff --git a/internal/bundle/bundles/artifact_path_with_volume/databricks_template_schema.json b/integration/bundle/bundles/artifact_path_with_volume/databricks_template_schema.json
similarity index 100%
rename from internal/bundle/bundles/artifact_path_with_volume/databricks_template_schema.json
rename to integration/bundle/bundles/artifact_path_with_volume/databricks_template_schema.json
diff --git a/internal/bundle/bundles/artifact_path_with_volume/template/databricks.yml.tmpl b/integration/bundle/bundles/artifact_path_with_volume/template/databricks.yml.tmpl
similarity index 100%
rename from internal/bundle/bundles/artifact_path_with_volume/template/databricks.yml.tmpl
rename to integration/bundle/bundles/artifact_path_with_volume/template/databricks.yml.tmpl
diff --git a/internal/bundle/bundles/basic/databricks_template_schema.json b/integration/bundle/bundles/basic/databricks_template_schema.json
similarity index 100%
rename from internal/bundle/bundles/basic/databricks_template_schema.json
rename to integration/bundle/bundles/basic/databricks_template_schema.json
diff --git a/internal/bundle/bundles/basic/template/databricks.yml.tmpl b/integration/bundle/bundles/basic/template/databricks.yml.tmpl
similarity index 100%
rename from internal/bundle/bundles/basic/template/databricks.yml.tmpl
rename to integration/bundle/bundles/basic/template/databricks.yml.tmpl
diff --git a/internal/bundle/bundles/basic/template/hello_world.py
b/integration/bundle/bundles/basic/template/hello_world.py similarity index 100% rename from internal/bundle/bundles/basic/template/hello_world.py rename to integration/bundle/bundles/basic/template/hello_world.py diff --git a/internal/bundle/bundles/clusters/databricks_template_schema.json b/integration/bundle/bundles/clusters/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/clusters/databricks_template_schema.json rename to integration/bundle/bundles/clusters/databricks_template_schema.json diff --git a/internal/bundle/bundles/clusters/template/databricks.yml.tmpl b/integration/bundle/bundles/clusters/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/clusters/template/databricks.yml.tmpl rename to integration/bundle/bundles/clusters/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/clusters/template/hello_world.py b/integration/bundle/bundles/clusters/template/hello_world.py similarity index 100% rename from internal/bundle/bundles/clusters/template/hello_world.py rename to integration/bundle/bundles/clusters/template/hello_world.py diff --git a/internal/bundle/bundles/dashboards/databricks_template_schema.json b/integration/bundle/bundles/dashboards/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/dashboards/databricks_template_schema.json rename to integration/bundle/bundles/dashboards/databricks_template_schema.json diff --git a/internal/bundle/bundles/dashboards/template/dashboard.lvdash.json b/integration/bundle/bundles/dashboards/template/dashboard.lvdash.json similarity index 100% rename from internal/bundle/bundles/dashboards/template/dashboard.lvdash.json rename to integration/bundle/bundles/dashboards/template/dashboard.lvdash.json diff --git a/internal/bundle/bundles/dashboards/template/databricks.yml.tmpl b/integration/bundle/bundles/dashboards/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/dashboards/template/databricks.yml.tmpl rename to integration/bundle/bundles/dashboards/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json b/integration/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json rename to integration/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/bar.py b/integration/bundle/bundles/deploy_then_remove_resources/template/bar.py similarity index 100% rename from internal/bundle/bundles/deploy_then_remove_resources/template/bar.py rename to integration/bundle/bundles/deploy_then_remove_resources/template/bar.py diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl b/integration/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl rename to integration/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py b/integration/bundle/bundles/deploy_then_remove_resources/template/foo.py similarity index 100% rename from internal/bundle/bundles/deploy_then_remove_resources/template/foo.py rename to 
integration/bundle/bundles/deploy_then_remove_resources/template/foo.py diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl b/integration/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl similarity index 100% rename from internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl rename to integration/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl diff --git a/internal/bundle/bundles/job_metadata/databricks_template_schema.json b/integration/bundle/bundles/job_metadata/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/job_metadata/databricks_template_schema.json rename to integration/bundle/bundles/job_metadata/databricks_template_schema.json diff --git a/internal/bundle/bundles/job_metadata/template/a/b/bar.py b/integration/bundle/bundles/job_metadata/template/a/b/bar.py similarity index 100% rename from internal/bundle/bundles/job_metadata/template/a/b/bar.py rename to integration/bundle/bundles/job_metadata/template/a/b/bar.py diff --git a/internal/bundle/bundles/job_metadata/template/a/b/resources.yml.tmpl b/integration/bundle/bundles/job_metadata/template/a/b/resources.yml.tmpl similarity index 100% rename from internal/bundle/bundles/job_metadata/template/a/b/resources.yml.tmpl rename to integration/bundle/bundles/job_metadata/template/a/b/resources.yml.tmpl diff --git a/internal/bundle/bundles/job_metadata/template/databricks.yml.tmpl b/integration/bundle/bundles/job_metadata/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/job_metadata/template/databricks.yml.tmpl rename to integration/bundle/bundles/job_metadata/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/job_metadata/template/foo.py b/integration/bundle/bundles/job_metadata/template/foo.py similarity index 100% rename from internal/bundle/bundles/job_metadata/template/foo.py rename to integration/bundle/bundles/job_metadata/template/foo.py diff --git a/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json b/integration/bundle/bundles/python_wheel_task/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/python_wheel_task/databricks_template_schema.json rename to integration/bundle/bundles/python_wheel_task/databricks_template_schema.json diff --git a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/integration/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl rename to integration/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl b/integration/bundle/bundles/python_wheel_task/template/setup.py.tmpl similarity index 100% rename from internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl rename to integration/bundle/bundles/python_wheel_task/template/setup.py.tmpl diff --git a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py b/integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py rename to integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py diff --git 
a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py b/integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py rename to integration/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py diff --git a/internal/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json b/integration/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json rename to integration/bundle/bundles/python_wheel_task_with_cluster/databricks_template_schema.json diff --git a/internal/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl b/integration/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl rename to integration/bundle/bundles/python_wheel_task_with_cluster/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl b/integration/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl rename to integration/bundle/bundles/python_wheel_task_with_cluster/template/setup.py.tmpl diff --git a/internal/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py b/integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py rename to integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__init__.py diff --git a/internal/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py b/integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py rename to integration/bundle/bundles/python_wheel_task_with_cluster/template/{{.project_name}}/__main__.py diff --git a/internal/bundle/bundles/python_wheel_task_with_environments/databricks_template_schema.json b/integration/bundle/bundles/python_wheel_task_with_environments/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_environments/databricks_template_schema.json rename to integration/bundle/bundles/python_wheel_task_with_environments/databricks_template_schema.json diff --git a/internal/bundle/bundles/python_wheel_task_with_environments/template/databricks.yml.tmpl b/integration/bundle/bundles/python_wheel_task_with_environments/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_environments/template/databricks.yml.tmpl rename to integration/bundle/bundles/python_wheel_task_with_environments/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/python_wheel_task_with_environments/template/setup.py.tmpl b/integration/bundle/bundles/python_wheel_task_with_environments/template/setup.py.tmpl similarity index 100% rename from 
internal/bundle/bundles/python_wheel_task_with_environments/template/setup.py.tmpl rename to integration/bundle/bundles/python_wheel_task_with_environments/template/setup.py.tmpl diff --git a/internal/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__init__.py b/integration/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__init__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__init__.py rename to integration/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__init__.py diff --git a/internal/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__main__.py b/integration/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__main__.py similarity index 100% rename from internal/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__main__.py rename to integration/bundle/bundles/python_wheel_task_with_environments/template/{{.project_name}}/__main__.py diff --git a/internal/bundle/bundles/recreate_pipeline/databricks_template_schema.json b/integration/bundle/bundles/recreate_pipeline/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/recreate_pipeline/databricks_template_schema.json rename to integration/bundle/bundles/recreate_pipeline/databricks_template_schema.json diff --git a/internal/bundle/bundles/recreate_pipeline/template/databricks.yml.tmpl b/integration/bundle/bundles/recreate_pipeline/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/recreate_pipeline/template/databricks.yml.tmpl rename to integration/bundle/bundles/recreate_pipeline/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/recreate_pipeline/template/nb.sql b/integration/bundle/bundles/recreate_pipeline/template/nb.sql similarity index 100% rename from internal/bundle/bundles/recreate_pipeline/template/nb.sql rename to integration/bundle/bundles/recreate_pipeline/template/nb.sql diff --git a/internal/bundle/bundles/spark_jar_task/databricks_template_schema.json b/integration/bundle/bundles/spark_jar_task/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/spark_jar_task/databricks_template_schema.json rename to integration/bundle/bundles/spark_jar_task/databricks_template_schema.json diff --git a/internal/bundle/bundles/spark_jar_task/template/databricks.yml.tmpl b/integration/bundle/bundles/spark_jar_task/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/spark_jar_task/template/databricks.yml.tmpl rename to integration/bundle/bundles/spark_jar_task/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/spark_jar_task/template/{{.project_name}}/META-INF/MANIFEST.MF b/integration/bundle/bundles/spark_jar_task/template/{{.project_name}}/META-INF/MANIFEST.MF similarity index 100% rename from internal/bundle/bundles/spark_jar_task/template/{{.project_name}}/META-INF/MANIFEST.MF rename to integration/bundle/bundles/spark_jar_task/template/{{.project_name}}/META-INF/MANIFEST.MF diff --git a/internal/bundle/bundles/spark_jar_task/template/{{.project_name}}/PrintArgs.java b/integration/bundle/bundles/spark_jar_task/template/{{.project_name}}/PrintArgs.java similarity index 100% rename from internal/bundle/bundles/spark_jar_task/template/{{.project_name}}/PrintArgs.java rename to 
integration/bundle/bundles/spark_jar_task/template/{{.project_name}}/PrintArgs.java diff --git a/internal/bundle/bundles/uc_schema/databricks_template_schema.json b/integration/bundle/bundles/uc_schema/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/uc_schema/databricks_template_schema.json rename to integration/bundle/bundles/uc_schema/databricks_template_schema.json diff --git a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl b/integration/bundle/bundles/uc_schema/template/databricks.yml.tmpl similarity index 93% rename from internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl rename to integration/bundle/bundles/uc_schema/template/databricks.yml.tmpl index 15076ac85..0cb8d4f61 100644 --- a/internal/bundle/bundles/uc_schema/template/databricks.yml.tmpl +++ b/integration/bundle/bundles/uc_schema/template/databricks.yml.tmpl @@ -12,7 +12,6 @@ resources: - notebook: path: ./nb.sql development: true - catalog: main include: - "*.yml" diff --git a/internal/bundle/bundles/uc_schema/template/nb.sql b/integration/bundle/bundles/uc_schema/template/nb.sql similarity index 100% rename from internal/bundle/bundles/uc_schema/template/nb.sql rename to integration/bundle/bundles/uc_schema/template/nb.sql diff --git a/internal/bundle/bundles/uc_schema/template/schema.yml.tmpl b/integration/bundle/bundles/uc_schema/template/schema.yml.tmpl similarity index 91% rename from internal/bundle/bundles/uc_schema/template/schema.yml.tmpl rename to integration/bundle/bundles/uc_schema/template/schema.yml.tmpl index 50067036e..0fcf10453 100644 --- a/internal/bundle/bundles/uc_schema/template/schema.yml.tmpl +++ b/integration/bundle/bundles/uc_schema/template/schema.yml.tmpl @@ -11,3 +11,4 @@ targets: pipelines: foo: target: ${resources.schemas.bar.id} + catalog: main diff --git a/internal/bundle/bundles/volume/databricks_template_schema.json b/integration/bundle/bundles/volume/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/volume/databricks_template_schema.json rename to integration/bundle/bundles/volume/databricks_template_schema.json diff --git a/internal/bundle/bundles/volume/template/databricks.yml.tmpl b/integration/bundle/bundles/volume/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/volume/template/databricks.yml.tmpl rename to integration/bundle/bundles/volume/template/databricks.yml.tmpl diff --git a/internal/bundle/bundles/volume/template/nb.sql b/integration/bundle/bundles/volume/template/nb.sql similarity index 100% rename from internal/bundle/bundles/volume/template/nb.sql rename to integration/bundle/bundles/volume/template/nb.sql diff --git a/internal/bundle/bundles/with_includes/databricks_template_schema.json b/integration/bundle/bundles/with_includes/databricks_template_schema.json similarity index 100% rename from internal/bundle/bundles/with_includes/databricks_template_schema.json rename to integration/bundle/bundles/with_includes/databricks_template_schema.json diff --git a/internal/bundle/bundles/with_includes/template/databricks.yml.tmpl b/integration/bundle/bundles/with_includes/template/databricks.yml.tmpl similarity index 100% rename from internal/bundle/bundles/with_includes/template/databricks.yml.tmpl rename to integration/bundle/bundles/with_includes/template/databricks.yml.tmpl diff --git a/internal/bundle/clusters_test.go b/integration/bundle/clusters_test.go similarity index 71% rename from internal/bundle/clusters_test.go rename to 
integration/bundle/clusters_test.go
index a961f3ea8..7992ecd7d 100644
--- a/internal/bundle/clusters_test.go
+++ b/integration/bundle/clusters_test.go
@@ -1,37 +1,33 @@
-package bundle
+package bundle_test

 import (
 	"fmt"
 	"testing"

-	"github.com/databricks/cli/internal"
 	"github.com/databricks/cli/internal/acc"
 	"github.com/databricks/cli/internal/testutil"
-	"github.com/databricks/cli/libs/env"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/require"
 )

-func TestAccDeployBundleWithCluster(t *testing.T) {
+func TestDeployBundleWithCluster(t *testing.T) {
 	ctx, wt := acc.WorkspaceTest(t)

-	if testutil.IsAWSCloud(wt.T) {
+	if testutil.IsAWSCloud(wt) {
 		t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters")
 	}

-	nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV"))
+	nodeTypeId := testutil.GetCloud(t).NodeTypeID()
 	uniqueId := uuid.New().String()
-	root, err := initTestTemplate(t, ctx, "clusters", map[string]any{
+	root := initTestTemplate(t, ctx, "clusters", map[string]any{
 		"unique_id": uniqueId,
 		"node_type_id": nodeTypeId,
 		"spark_version": defaultSparkVersion,
 	})
-	require.NoError(t, err)

 	t.Cleanup(func() {
-		err = destroyBundle(t, ctx, root)
-		require.NoError(t, err)
+		destroyBundle(t, ctx, root)

 		cluster, err := wt.W.Clusters.GetByClusterName(ctx, fmt.Sprintf("test-cluster-%s", uniqueId))
 		if err != nil {
@@ -39,11 +35,9 @@ func TestAccDeployBundleWithCluster(t *testing.T) {
 		} else {
 			require.Contains(t, []compute.State{compute.StateTerminated, compute.StateTerminating}, cluster.State)
 		}
-
 	})

-	err = deployBundle(t, ctx, root)
-	require.NoError(t, err)
+	deployBundle(t, ctx, root)

 	// Cluster should exist after bundle deployment
 	cluster, err := wt.W.Clusters.GetByClusterName(ctx, fmt.Sprintf("test-cluster-%s", uniqueId))
diff --git a/internal/bundle/dashboards_test.go b/integration/bundle/dashboards_test.go
similarity index 79%
rename from internal/bundle/dashboards_test.go
rename to integration/bundle/dashboards_test.go
index 3c2e27c62..985ef8611 100644
--- a/internal/bundle/dashboards_test.go
+++ b/integration/bundle/dashboards_test.go
@@ -1,10 +1,11 @@
-package bundle
+package bundle_test

 import (
 	"fmt"
 	"testing"

 	"github.com/databricks/cli/internal/acc"
+	"github.com/databricks/cli/internal/testutil"
 	"github.com/databricks/databricks-sdk-go/service/dashboards"
 	"github.com/databricks/databricks-sdk-go/service/workspace"
 	"github.com/google/uuid"
@@ -12,24 +13,21 @@ import (
 	"github.com/stretchr/testify/require"
 )

-func TestAccDashboards(t *testing.T) {
+func TestDashboards(t *testing.T) {
 	ctx, wt := acc.WorkspaceTest(t)

-	warehouseID := acc.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
+	warehouseID := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
 	uniqueID := uuid.New().String()
-	root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{
+	root := initTestTemplate(t, ctx, "dashboards", map[string]any{
 		"unique_id": uniqueID,
 		"warehouse_id": warehouseID,
 	})
-	require.NoError(t, err)

 	t.Cleanup(func() {
-		err = destroyBundle(t, ctx, root)
-		require.NoError(t, err)
+		destroyBundle(t, ctx, root)
 	})

-	err = deployBundle(t, ctx, root)
-	require.NoError(t, err)
+	deployBundle(t, ctx, root)

 	// Load bundle configuration by running the validate command.
 	b := unmarshalConfig(t, mustValidateBundle(t, ctx, root))
@@ -54,12 +52,11 @@ func TestAccDashboards(t *testing.T) {
 	require.NoError(t, err)

 	// Try to redeploy the bundle and confirm that the out of band modification is detected.
- stdout, _, err := deployBundleWithArgs(t, ctx, root) + stdout, _, err := deployBundleWithArgsErr(t, ctx, root) require.Error(t, err) assert.Contains(t, stdout, `Error: dashboard "file_reference" has been modified remotely`+"\n") // Redeploy the bundle with the --force flag and confirm that the out of band modification is ignored. - _, stderr, err := deployBundleWithArgs(t, ctx, root, "--force") - require.NoError(t, err) + _, stderr := deployBundleWithArgs(t, ctx, root, "--force") assert.Contains(t, stderr, `Deployment complete!`+"\n") } diff --git a/internal/bundle/deploy_test.go b/integration/bundle/deploy_test.go similarity index 80% rename from internal/bundle/deploy_test.go rename to integration/bundle/deploy_test.go index 759e85de5..2c3110190 100644 --- a/internal/bundle/deploy_test.go +++ b/integration/bundle/deploy_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "context" @@ -11,8 +11,9 @@ import ( "testing" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/env" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" @@ -24,13 +25,11 @@ import ( ) func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.WorkspaceClient, uniqueId string) string { - bundleRoot, err := initTestTemplate(t, ctx, "uc_schema", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "uc_schema", map[string]any{ "unique_id": uniqueId, }) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -80,7 +79,7 @@ func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.Worksp return bundleRoot } -func TestAccBundleDeployUcSchema(t *testing.T) { +func TestBundleDeployUcSchema(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W @@ -95,8 +94,7 @@ func TestAccBundleDeployUcSchema(t *testing.T) { require.NoError(t, err) // Redeploy the bundle - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // Assert the schema is deleted _, err = w.Schemas.GetByFullName(ctx, strings.Join([]string{catalogName, schemaName}, ".")) @@ -105,7 +103,7 @@ func TestAccBundleDeployUcSchema(t *testing.T) { assert.Equal(t, "SCHEMA_DOES_NOT_EXIST", apiErr.ErrorCode) } -func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) { +func TestBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W @@ -117,9 +115,9 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) { require.NoError(t, err) // Redeploy the bundle - t.Setenv("BUNDLE_ROOT", bundleRoot) - t.Setenv("TERM", "dumb") - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + c := testcli.NewRunner(t, ctx, "bundle", "deploy", "--force-lock") stdout, stderr, err := c.Run() assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) @@ -127,22 +125,20 @@ func TestAccBundleDeployUcSchemaFailsWithoutAutoApprove(t *testing.T) { assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") } -func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) { +func TestBundlePipelineDeleteWithoutAutoApprove(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) // deploy pipeline - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // assert pipeline is created pipelineName := "test-bundle-pipeline-" + uniqueId @@ -161,9 +157,9 @@ func TestAccBundlePipelineDeleteWithoutAutoApprove(t *testing.T) { require.NoError(t, err) // Redeploy the bundle. Expect it to fail because deleting the pipeline requires --auto-approve. - t.Setenv("BUNDLE_ROOT", bundleRoot) - t.Setenv("TERM", "dumb") - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock") + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + c := testcli.NewRunner(t, ctx, "bundle", "deploy", "--force-lock") stdout, stderr, err := c.Run() assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) @@ -173,21 +169,18 @@ restore the defined STs and MVs through full refresh. Note that recreation is ne properties such as the 'catalog' or 'storage' are changed: delete pipeline bar`) assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed") - } -func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) { +func TestBundlePipelineRecreateWithoutAutoApprove(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "recreate_pipeline", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "recreate_pipeline", map[string]any{ "unique_id": uniqueId, }) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -200,9 +193,9 @@ func TestAccBundlePipelineRecreateWithoutAutoApprove(t *testing.T) { require.Equal(t, pipelineName, pipeline.Name) // Redeploy the bundle, pointing the DLT pipeline to a different UC catalog. - t.Setenv("BUNDLE_ROOT", bundleRoot) - t.Setenv("TERM", "dumb") - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--var=\"catalog=whatever\"") + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + c := testcli.NewRunner(t, ctx, "bundle", "deploy", "--force-lock", "--var=\"catalog=whatever\"") stdout, stderr, err := c.Run() assert.EqualError(t, err, root.ErrAlreadyPrinted.Error()) @@ -214,27 +207,25 @@ properties such as the 'catalog' or 'storage' are changed: assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") } -func TestAccDeployBasicBundleLogs(t *testing.T) { +func TestDeployBasicBundleLogs(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - root, err := initTestTemplate(t, ctx, "basic", map[string]any{ + root := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) t.Cleanup(func() { - err = destroyBundle(t, ctx, root) - require.NoError(t, err) + destroyBundle(t, ctx, root) }) currentUser, err := wt.W.CurrentUser.Me(ctx) require.NoError(t, err) - stdout, stderr := blackBoxRun(t, root, "bundle", "deploy") + stdout, stderr := blackBoxRun(t, ctx, root, "bundle", "deploy") assert.Equal(t, strings.Join([]string{ fmt.Sprintf("Uploading bundle files to /Workspace/Users/%s/.bundle/%s/files...", currentUser.UserName, uniqueId), "Deploying resources...", @@ -244,18 +235,16 @@ func TestAccDeployBasicBundleLogs(t *testing.T) { assert.Equal(t, "", stdout) } -func TestAccDeployUcVolume(t *testing.T) { +func TestDeployUcVolume(t *testing.T) { ctx, wt := acc.UcWorkspaceTest(t) w := wt.W uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "volume", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "volume", map[string]any{ "unique_id": uniqueId, }) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -280,9 +269,9 @@ func TestAccDeployUcVolume(t *testing.T) { assert.Equal(t, []catalog.Privilege{catalog.PrivilegeWriteVolume}, grants.PrivilegeAssignments[0].Privileges) // Recreation of the volume without --auto-approve should fail since prompting is not possible - t.Setenv("TERM", "dumb") - t.Setenv("BUNDLE_ROOT", bundleRoot) - stdout, stderr, err := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}").Run() + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + stdout, stderr, err := testcli.NewRunner(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}").Run() assert.Error(t, err) assert.Contains(t, stderr.String(), `This action will result in the deletion or recreation of the following volumes. For managed volumes, the files stored in the volume are also deleted from your @@ -292,9 +281,9 @@ is removed from the catalog, but the underlying files are not deleted: assert.Contains(t, stdout.String(), "the deployment requires destructive actions, but current console does not support prompting. 
Please specify --auto-approve if you would like to skip prompts and proceed") // Successfully recreate the volume with --auto-approve - t.Setenv("TERM", "dumb") - t.Setenv("BUNDLE_ROOT", bundleRoot) - _, _, err = internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}", "--auto-approve").Run() + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "TERM", "dumb") + _, _, err = testcli.NewRunner(t, ctx, "bundle", "deploy", "--var=schema_name=${resources.schemas.schema2.name}", "--auto-approve").Run() assert.NoError(t, err) // Assert the volume is updated successfully diff --git a/internal/bundle/deploy_then_remove_resources_test.go b/integration/bundle/deploy_then_remove_resources_test.go similarity index 69% rename from internal/bundle/deploy_then_remove_resources_test.go rename to integration/bundle/deploy_then_remove_resources_test.go index 66ec5c16a..b792d3623 100644 --- a/internal/bundle/deploy_then_remove_resources_test.go +++ b/integration/bundle/deploy_then_remove_resources_test.go @@ -1,34 +1,31 @@ -package bundle +package bundle_test import ( "os" "path/filepath" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" + "github.com/databricks/cli/internal/testutil" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccBundleDeployThenRemoveResources(t *testing.T) { +func TestBundleDeployThenRemoveResources(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) // deploy pipeline - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // assert pipeline is created pipelineName := "test-bundle-pipeline-" + uniqueId @@ -47,8 +44,7 @@ func TestAccBundleDeployThenRemoveResources(t *testing.T) { require.NoError(t, err) // deploy again - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // assert pipeline is deleted _, err = w.Pipelines.GetByName(ctx, pipelineName) @@ -59,7 +55,6 @@ func TestAccBundleDeployThenRemoveResources(t *testing.T) { assert.ErrorContains(t, err, "does not exist") t.Cleanup(func() { - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) }) } diff --git a/internal/bundle/deploy_to_shared_test.go b/integration/bundle/deploy_to_shared_test.go similarity index 51% rename from internal/bundle/deploy_to_shared_test.go rename to integration/bundle/deploy_to_shared_test.go index 568c1fb56..c97933217 100644 --- a/internal/bundle/deploy_to_shared_test.go +++ b/integration/bundle/deploy_to_shared_test.go @@ -1,38 +1,34 @@ -package bundle +package bundle_test import ( "fmt" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" + "github.com/databricks/cli/internal/testutil" "github.com/google/uuid" "github.com/stretchr/testify/require" ) -func TestAccDeployBasicToSharedWorkspacePath(t *testing.T) { +func 
TestDeployBasicToSharedWorkspacePath(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() currentUser, err := wt.W.CurrentUser.Me(ctx) require.NoError(t, err) - bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, "root_path": fmt.Sprintf("/Shared/%s", currentUser.UserName), }) - require.NoError(t, err) t.Cleanup(func() { - err = destroyBundle(wt.T, ctx, bundleRoot) - require.NoError(wt.T, err) + destroyBundle(wt, ctx, bundleRoot) }) - err = deployBundle(wt.T, ctx, bundleRoot) - require.NoError(wt.T, err) + deployBundle(wt, ctx, bundleRoot) } diff --git a/internal/bundle/deployment_state_test.go b/integration/bundle/deployment_state_test.go similarity index 78% rename from internal/bundle/deployment_state_test.go rename to integration/bundle/deployment_state_test.go index 25f36d4a2..a7e01643d 100644 --- a/internal/bundle/deployment_state_test.go +++ b/integration/bundle/deployment_state_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "os" @@ -7,43 +7,39 @@ import ( "testing" "github.com/databricks/cli/bundle/deploy" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/env" "github.com/google/uuid" "github.com/stretchr/testify/require" ) -func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) { - env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) - +func TestFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W - nodeTypeId := internal.GetNodeTypeId(env) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "spark_version": "13.3.x-scala2.12", "node_type_id": nodeTypeId, }) - require.NoError(t, err) - t.Setenv("BUNDLE_ROOT", bundleRoot) + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) // Add a test file to the bundle - err = os.WriteFile(filepath.Join(bundleRoot, "test.py"), []byte("print('Hello, World!')"), 0644) + err := os.WriteFile(filepath.Join(bundleRoot, "test.py"), []byte("print('Hello, World!')"), 0o644) require.NoError(t, err) - err = os.WriteFile(filepath.Join(bundleRoot, "test_to_modify.py"), []byte("print('Hello, World!')"), 0644) + err = os.WriteFile(filepath.Join(bundleRoot, "test_to_modify.py"), []byte("print('Hello, World!')"), 0o644) require.NoError(t, err) // Add a notebook to the bundle - err = os.WriteFile(filepath.Join(bundleRoot, "notebook.py"), []byte("# Databricks notebook source\nHello, World!"), 0644) + err = os.WriteFile(filepath.Join(bundleRoot, "notebook.py"), []byte("# Databricks notebook source\nHello, World!"), 0o644) require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -79,11 +75,10 @@ func TestAccFilesAreSyncedCorrectlyWhenNoSnapshot(t *testing.T) { require.NoError(t, err) // Modify the content of another file - err = os.WriteFile(filepath.Join(bundleRoot, "test_to_modify.py"), []byte("print('Modified!')"), 0644) + err = 
os.WriteFile(filepath.Join(bundleRoot, "test_to_modify.py"), []byte("print('Modified!')"), 0o644) require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // Check that removed file is not in workspace anymore _, err = w.Workspace.GetStatusByPath(ctx, path.Join(remoteRoot, "files", "test.py")) diff --git a/internal/bundle/destroy_test.go b/integration/bundle/destroy_test.go similarity index 80% rename from internal/bundle/destroy_test.go rename to integration/bundle/destroy_test.go index baccf4e6f..59d1816e0 100644 --- a/internal/bundle/destroy_test.go +++ b/integration/bundle/destroy_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "errors" @@ -6,37 +6,34 @@ import ( "path/filepath" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go/apierr" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccBundleDestroy(t *testing.T) { +func TestBundleDestroy(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) snapshotsDir := filepath.Join(bundleRoot, ".databricks", "bundle", "default", "sync-snapshots") // Assert the snapshot file does not exist - _, err = os.ReadDir(snapshotsDir) + _, err := os.ReadDir(snapshotsDir) assert.ErrorIs(t, err, os.ErrNotExist) // deploy resources - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // Assert the snapshot file exists entries, err := os.ReadDir(snapshotsDir) @@ -61,8 +58,7 @@ func TestAccBundleDestroy(t *testing.T) { assert.Equal(t, job.Settings.Name, jobName) // destroy bundle - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) // assert pipeline is deleted _, err = w.Pipelines.GetByName(ctx, pipelineName) diff --git a/internal/bundle/empty_bundle_test.go b/integration/bundle/empty_bundle_test.go similarity index 73% rename from internal/bundle/empty_bundle_test.go rename to integration/bundle/empty_bundle_test.go index 36883ae00..0cf9aad34 100644 --- a/internal/bundle/empty_bundle_test.go +++ b/integration/bundle/empty_bundle_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "fmt" @@ -11,7 +11,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccEmptyBundleDeploy(t *testing.T) { +func TestEmptyBundleDeploy(t *testing.T) { ctx, _ := acc.WorkspaceTest(t) // create empty bundle @@ -26,11 +26,9 @@ func TestAccEmptyBundleDeploy(t *testing.T) { f.Close() // deploy empty bundle - err = deployBundle(t, ctx, tmpDir) - require.NoError(t, err) + deployBundle(t, ctx, tmpDir) t.Cleanup(func() { - err = destroyBundle(t, ctx, tmpDir) - require.NoError(t, err) + destroyBundle(t, ctx, tmpDir) }) } diff --git a/internal/bundle/environments_test.go b/integration/bundle/environments_test.go similarity index 75% rename from internal/bundle/environments_test.go rename to 
integration/bundle/environments_test.go index 5cffe8857..4f831cf68 100644 --- a/internal/bundle/environments_test.go +++ b/integration/bundle/environments_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "testing" @@ -8,18 +8,16 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) { +func TestPythonWheelTaskWithEnvironmentsDeployAndRun(t *testing.T) { t.Skip("Skipping test until serverless is enabled") ctx, _ := acc.WorkspaceTest(t) - bundleRoot, err := initTestTemplate(t, ctx, "python_wheel_task_with_environments", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "python_wheel_task_with_environments", map[string]any{ "unique_id": uuid.New().String(), }) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) diff --git a/internal/bundle/generate_job_test.go b/integration/bundle/generate_job_test.go similarity index 75% rename from internal/bundle/generate_job_test.go rename to integration/bundle/generate_job_test.go index 847a7a14e..9d0b466ba 100644 --- a/internal/bundle/generate_job_test.go +++ b/integration/bundle/generate_job_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "context" @@ -9,9 +9,10 @@ import ( "strings" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/filer" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/compute" @@ -20,27 +21,26 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) { +func TestGenerateFromExistingJobAndDeploy(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - gt := &generateJobTest{T: t, w: wt.W} + gt := &generateJobTest{T: wt, w: wt.W} uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "with_includes", map[string]any{ "unique_id": uniqueId, }) - require.NoError(t, err) jobId := gt.createTestJob(ctx) t.Cleanup(func() { gt.destroyJob(ctx, jobId) }) - t.Setenv("BUNDLE_ROOT", bundleRoot) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "job", + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + c := testcli.NewRunner(t, ctx, "bundle", "generate", "job", "--existing-job-id", fmt.Sprint(jobId), "--config-dir", filepath.Join(bundleRoot, "resources"), "--source-dir", filepath.Join(bundleRoot, "src")) - _, _, err = c.Run() + _, _, err := c.Run() require.NoError(t, err) _, err = os.Stat(filepath.Join(bundleRoot, "src", "test.py")) @@ -61,15 +61,13 @@ func TestAccGenerateFromExistingJobAndDeploy(t *testing.T) { require.Contains(t, generatedYaml, "spark_version: 13.3.x-scala2.12") require.Contains(t, generatedYaml, "num_workers: 1") - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) } type generateJobTest struct { - T *testing.T + T *acc.WorkspaceT w *databricks.WorkspaceClient } @@ -77,17 +75,7 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 { t := gt.T w := gt.w - var nodeTypeId string - switch testutil.GetCloud(t) { - case 
testutil.AWS: - nodeTypeId = "i3.xlarge" - case testutil.Azure: - nodeTypeId = "Standard_DS4_v2" - case testutil.GCP: - nodeTypeId = "n1-standard-4" - } - - tmpdir := internal.TemporaryWorkspaceDir(t, w) + tmpdir := acc.TemporaryWorkspaceDir(t, "generate-job-") f, err := filer.NewWorkspaceFilesClient(w, tmpdir) require.NoError(t, err) @@ -95,14 +83,14 @@ func (gt *generateJobTest) createTestJob(ctx context.Context) int64 { require.NoError(t, err) resp, err := w.Jobs.Create(ctx, jobs.CreateJob{ - Name: internal.RandomName("generated-job-"), + Name: testutil.RandomName("generated-job-"), Tasks: []jobs.Task{ { TaskKey: "test", NewCluster: &compute.ClusterSpec{ SparkVersion: "13.3.x-scala2.12", NumWorkers: 1, - NodeTypeId: nodeTypeId, + NodeTypeId: testutil.GetCloud(t).NodeTypeID(), SparkConf: map[string]string{ "spark.databricks.enableWsfs": "true", "spark.databricks.hive.metastore.glueCatalog.enabled": "true", diff --git a/internal/bundle/generate_pipeline_test.go b/integration/bundle/generate_pipeline_test.go similarity index 78% rename from internal/bundle/generate_pipeline_test.go rename to integration/bundle/generate_pipeline_test.go index 82467952d..303d5cb22 100644 --- a/internal/bundle/generate_pipeline_test.go +++ b/integration/bundle/generate_pipeline_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "context" @@ -9,8 +9,10 @@ import ( "strings" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/filer" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -18,27 +20,26 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) { +func TestGenerateFromExistingPipelineAndDeploy(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - gt := &generatePipelineTest{T: t, w: wt.W} + gt := &generatePipelineTest{T: wt, w: wt.W} uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "with_includes", map[string]any{ "unique_id": uniqueId, }) - require.NoError(t, err) pipelineId, name := gt.createTestPipeline(ctx) t.Cleanup(func() { gt.destroyPipeline(ctx, pipelineId) }) - t.Setenv("BUNDLE_ROOT", bundleRoot) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "generate", "pipeline", + ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot) + c := testcli.NewRunner(t, ctx, "bundle", "generate", "pipeline", "--existing-pipeline-id", fmt.Sprint(pipelineId), "--config-dir", filepath.Join(bundleRoot, "resources"), "--source-dir", filepath.Join(bundleRoot, "src")) - _, _, err = c.Run() + _, _, err := c.Run() require.NoError(t, err) _, err = os.Stat(filepath.Join(bundleRoot, "src", "notebook.py")) @@ -58,8 +59,8 @@ func TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) { generatedYaml := string(data) // Replace pipeline name - generatedYaml = strings.ReplaceAll(generatedYaml, name, internal.RandomName("copy-generated-pipeline-")) - err = os.WriteFile(fileName, []byte(generatedYaml), 0644) + generatedYaml = strings.ReplaceAll(generatedYaml, name, testutil.RandomName("copy-generated-pipeline-")) + err = os.WriteFile(fileName, []byte(generatedYaml), 0o644) require.NoError(t, err) require.Contains(t, generatedYaml, "libraries:") @@ -68,15 +69,13 @@ func 
TestAccGenerateFromExistingPipelineAndDeploy(t *testing.T) { require.Contains(t, generatedYaml, "- file:") require.Contains(t, generatedYaml, fmt.Sprintf("path: %s", filepath.Join("..", "src", "test.py"))) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) } type generatePipelineTest struct { - T *testing.T + T *acc.WorkspaceT w *databricks.WorkspaceClient } @@ -84,7 +83,7 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string, t := gt.T w := gt.w - tmpdir := internal.TemporaryWorkspaceDir(t, w) + tmpdir := acc.TemporaryWorkspaceDir(t, "generate-pipeline-") f, err := filer.NewWorkspaceFilesClient(w, tmpdir) require.NoError(t, err) @@ -94,10 +93,9 @@ func (gt *generatePipelineTest) createTestPipeline(ctx context.Context) (string, err = f.Write(ctx, "test.py", strings.NewReader("print('Hello!')")) require.NoError(t, err) - env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - nodeTypeId := internal.GetNodeTypeId(env) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() - name := internal.RandomName("generated-pipeline-") + name := testutil.RandomName("generated-pipeline-") resp, err := w.Pipelines.Create(ctx, pipelines.CreatePipeline{ Name: name, Libraries: []pipelines.PipelineLibrary{ diff --git a/internal/bundle/helpers.go b/integration/bundle/helpers_test.go similarity index 50% rename from internal/bundle/helpers.go rename to integration/bundle/helpers_test.go index dd9c841c9..e884cd8c6 100644 --- a/internal/bundle/helpers.go +++ b/integration/bundle/helpers_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "bytes" @@ -9,133 +9,136 @@ import ( "os/exec" "path/filepath" "strings" - "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/flags" + "github.com/databricks/cli/libs/folders" "github.com/databricks/cli/libs/template" - "github.com/databricks/cli/libs/vfs" "github.com/databricks/databricks-sdk-go" "github.com/stretchr/testify/require" ) const defaultSparkVersion = "13.3.x-snapshot-scala2.12" -func initTestTemplate(t *testing.T, ctx context.Context, templateName string, config map[string]any) (string, error) { +func initTestTemplate(t testutil.TestingT, ctx context.Context, templateName string, config map[string]any) string { bundleRoot := t.TempDir() return initTestTemplateWithBundleRoot(t, ctx, templateName, config, bundleRoot) } -func initTestTemplateWithBundleRoot(t *testing.T, ctx context.Context, templateName string, config map[string]any, bundleRoot string) (string, error) { +func initTestTemplateWithBundleRoot(t testutil.TestingT, ctx context.Context, templateName string, config map[string]any, bundleRoot string) string { templateRoot := filepath.Join("bundles", templateName) - configFilePath, err := writeConfigFile(t, config) - if err != nil { - return "", err - } + configFilePath := writeConfigFile(t, config) ctx = root.SetWorkspaceClient(ctx, nil) - cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "", "bundles") + cmd := cmdio.NewIO(ctx, flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "", "bundles") ctx = cmdio.InContext(ctx, 
cmd) out, err := filer.NewLocalClient(bundleRoot) require.NoError(t, err) err = template.Materialize(ctx, configFilePath, os.DirFS(templateRoot), out) - return bundleRoot, err + require.NoError(t, err) + return bundleRoot } -func writeConfigFile(t *testing.T, config map[string]any) (string, error) { +func writeConfigFile(t testutil.TestingT, config map[string]any) string { bytes, err := json.Marshal(config) - if err != nil { - return "", err - } + require.NoError(t, err) dir := t.TempDir() filepath := filepath.Join(dir, "config.json") t.Log("Configuration for template: ", string(bytes)) - err = os.WriteFile(filepath, bytes, 0644) - return filepath, err + err = os.WriteFile(filepath, bytes, 0o644) + require.NoError(t, err) + return filepath } -func validateBundle(t *testing.T, ctx context.Context, path string) ([]byte, error) { +func validateBundle(t testutil.TestingT, ctx context.Context, path string) ([]byte, error) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "validate", "--output", "json") + c := testcli.NewRunner(t, ctx, "bundle", "validate", "--output", "json") stdout, _, err := c.Run() return stdout.Bytes(), err } -func mustValidateBundle(t *testing.T, ctx context.Context, path string) []byte { +func mustValidateBundle(t testutil.TestingT, ctx context.Context, path string) []byte { data, err := validateBundle(t, ctx, path) require.NoError(t, err) return data } -func unmarshalConfig(t *testing.T, data []byte) *bundle.Bundle { +func unmarshalConfig(t testutil.TestingT, data []byte) *bundle.Bundle { bundle := &bundle.Bundle{} err := json.Unmarshal(data, &bundle.Config) require.NoError(t, err) return bundle } -func deployBundle(t *testing.T, ctx context.Context, path string) error { +func deployBundle(t testutil.TestingT, ctx context.Context, path string) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve") + c := testcli.NewRunner(t, ctx, "bundle", "deploy", "--force-lock", "--auto-approve") _, _, err := c.Run() - return err + require.NoError(t, err) } -func deployBundleWithArgs(t *testing.T, ctx context.Context, path string, args ...string) (string, string, error) { +func deployBundleWithArgsErr(t testutil.TestingT, ctx context.Context, path string, args ...string) (string, string, error) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) args = append([]string{"bundle", "deploy"}, args...) - c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) + c := testcli.NewRunner(t, ctx, args...) stdout, stderr, err := c.Run() return stdout.String(), stderr.String(), err } -func deployBundleWithFlags(t *testing.T, ctx context.Context, path string, flags []string) error { +func deployBundleWithArgs(t testutil.TestingT, ctx context.Context, path string, args ...string) (string, string) { + stdout, stderr, err := deployBundleWithArgsErr(t, ctx, path, args...) + require.NoError(t, err) + return stdout, stderr +} + +func deployBundleWithFlags(t testutil.TestingT, ctx context.Context, path string, flags []string) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) args := []string{"bundle", "deploy", "--force-lock"} args = append(args, flags...) - c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) + c := testcli.NewRunner(t, ctx, args...) 
_, _, err := c.Run() - return err + require.NoError(t, err) } -func runResource(t *testing.T, ctx context.Context, path string, key string) (string, error) { +func runResource(t testutil.TestingT, ctx context.Context, path, key string) (string, error) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = cmdio.NewContext(ctx, cmdio.Default()) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "run", key) + c := testcli.NewRunner(t, ctx, "bundle", "run", key) stdout, _, err := c.Run() return stdout.String(), err } -func runResourceWithParams(t *testing.T, ctx context.Context, path string, key string, params ...string) (string, error) { +func runResourceWithParams(t testutil.TestingT, ctx context.Context, path, key string, params ...string) (string, error) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) ctx = cmdio.NewContext(ctx, cmdio.Default()) args := make([]string, 0) args = append(args, "bundle", "run", key) args = append(args, params...) - c := internal.NewCobraTestRunnerWithContext(t, ctx, args...) + c := testcli.NewRunner(t, ctx, args...) stdout, _, err := c.Run() return stdout.String(), err } -func destroyBundle(t *testing.T, ctx context.Context, path string) error { +func destroyBundle(t testutil.TestingT, ctx context.Context, path string) { ctx = env.Set(ctx, "BUNDLE_ROOT", path) - c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "destroy", "--auto-approve") + c := testcli.NewRunner(t, ctx, "bundle", "destroy", "--auto-approve") _, _, err := c.Run() - return err + require.NoError(t, err) } -func getBundleRemoteRootPath(w *databricks.WorkspaceClient, t *testing.T, uniqueId string) string { +func getBundleRemoteRootPath(w *databricks.WorkspaceClient, t testutil.TestingT, uniqueId string) string { // Compute root path for the bundle deployment me, err := w.CurrentUser.Me(context.Background()) require.NoError(t, err) @@ -143,16 +146,19 @@ func getBundleRemoteRootPath(w *databricks.WorkspaceClient, t *testing.T, unique return root } -func blackBoxRun(t *testing.T, root string, args ...string) (stdout string, stderr string) { - cwd := vfs.MustNew(".") - gitRoot, err := vfs.FindLeafInTree(cwd, ".git") +func blackBoxRun(t testutil.TestingT, ctx context.Context, root string, args ...string) (stdout, stderr string) { + gitRoot, err := folders.FindDirWithLeaf(".", ".git") require.NoError(t, err) - t.Setenv("BUNDLE_ROOT", root) - // Create the command cmd := exec.Command("go", append([]string{"run", "main.go"}, args...)...) 
- cmd.Dir = gitRoot.Native() + cmd.Dir = gitRoot + + // Configure the environment + ctx = env.Set(ctx, "BUNDLE_ROOT", root) + for key, value := range env.All(ctx) { + cmd.Env = append(cmd.Env, fmt.Sprintf("%s=%s", key, value)) + } // Create buffers to capture output var outBuffer, errBuffer bytes.Buffer diff --git a/internal/init_test.go b/integration/bundle/init_test.go similarity index 68% rename from internal/init_test.go rename to integration/bundle/init_test.go index 25bfc19da..bc3757fde 100644 --- a/internal/init_test.go +++ b/integration/bundle/init_test.go @@ -1,4 +1,4 @@ -package internal +package bundle_test import ( "context" @@ -11,18 +11,18 @@ import ( "testing" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/iamutil" - "github.com/databricks/databricks-sdk-go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccBundleInitErrorOnUnknownFields(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - +func TestBundleInitErrorOnUnknownFields(t *testing.T) { + ctx := context.Background() tmpDir := t.TempDir() - _, _, err := RequireErrorRun(t, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir) + _, _, err := testcli.RequireErrorRun(t, ctx, "bundle", "init", "./testdata/init/field-does-not-exist", "--output-dir", tmpDir) assert.EqualError(t, err, "failed to compute file content for bar.tmpl. variable \"does_not_exist\" not defined") } @@ -38,17 +38,15 @@ func TestAccBundleInitErrorOnUnknownFields(t *testing.T) { // 2. While rare and to be avoided if possible, the CLI reserves the right to // make changes that can break the MLOps Stacks DAB. In which case we should // skip this test until the MLOps Stacks DAB is updated to work again. 
-func TestAccBundleInitOnMlopsStacks(t *testing.T) { - t.Parallel() - env := testutil.GetCloud(t).String() +func TestBundleInitOnMlopsStacks(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W tmpDir1 := t.TempDir() tmpDir2 := t.TempDir() - w, err := databricks.NewWorkspaceClient(&databricks.Config{}) - require.NoError(t, err) - - projectName := RandomName("project_name_") + projectName := testutil.RandomName("project_name_") + env := testutil.GetCloud(t).String() // Create a config file with the project name and root dir initConfig := map[string]string{ @@ -59,29 +57,30 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) { } b, err := json.Marshal(initConfig) require.NoError(t, err) - os.WriteFile(filepath.Join(tmpDir1, "config.json"), b, 0644) + err = os.WriteFile(filepath.Join(tmpDir1, "config.json"), b, 0o644) + require.NoError(t, err) // Run bundle init assert.NoFileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md")) - RequireSuccessfulRun(t, "bundle", "init", "mlops-stacks", "--output-dir", tmpDir2, "--config-file", filepath.Join(tmpDir1, "config.json")) + testcli.RequireSuccessfulRun(t, ctx, "bundle", "init", "mlops-stacks", "--output-dir", tmpDir2, "--config-file", filepath.Join(tmpDir1, "config.json")) // Assert that the README.md file was created - assert.FileExists(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md")) - assertLocalFileContents(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md"), fmt.Sprintf("# %s", projectName)) + contents := testutil.ReadFile(t, filepath.Join(tmpDir2, "repo_name", projectName, "README.md")) + assert.Contains(t, contents, fmt.Sprintf("# %s", projectName)) // Validate the stack testutil.Chdir(t, filepath.Join(tmpDir2, "repo_name", projectName)) - RequireSuccessfulRun(t, "bundle", "validate") + testcli.RequireSuccessfulRun(t, ctx, "bundle", "validate") // Deploy the stack - RequireSuccessfulRun(t, "bundle", "deploy") + testcli.RequireSuccessfulRun(t, ctx, "bundle", "deploy") t.Cleanup(func() { // Delete the stack - RequireSuccessfulRun(t, "bundle", "destroy", "--auto-approve") + testcli.RequireSuccessfulRun(t, ctx, "bundle", "destroy", "--auto-approve") }) // Get summary of the bundle deployment - stdout, _ := RequireSuccessfulRun(t, "bundle", "summary", "--output", "json") + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "bundle", "summary", "--output", "json") summary := &config.Root{} err = json.Unmarshal(stdout.Bytes(), summary) require.NoError(t, err) @@ -100,24 +99,23 @@ func TestAccBundleInitOnMlopsStacks(t *testing.T) { assert.Contains(t, job.Settings.Name, fmt.Sprintf("dev-%s-batch-inference-job", projectName)) } -func TestAccBundleInitHelpers(t *testing.T) { - env := GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) +func TestBundleInitHelpers(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W - w, err := databricks.NewWorkspaceClient(&databricks.Config{}) - require.NoError(t, err) - - me, err := w.CurrentUser.Me(context.Background()) + me, err := w.CurrentUser.Me(ctx) require.NoError(t, err) var smallestNode string - switch env { - case "azure": + switch testutil.GetCloud(t) { + case testutil.Azure: smallestNode = "Standard_D3_v2" - case "gcp": + case testutil.GCP: smallestNode = "n1-standard-4" - default: + case testutil.AWS: smallestNode = "i3.xlarge" + default: + t.Fatal("Unknown cloud environment") } tests := []struct { @@ -151,17 +149,18 @@ func TestAccBundleInitHelpers(t *testing.T) { tmpDir := t.TempDir() tmpDir2 := t.TempDir() - err := 
os.Mkdir(filepath.Join(tmpDir, "template"), 0755) + err := os.Mkdir(filepath.Join(tmpDir, "template"), 0o755) require.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "template", "foo.txt.tmpl"), []byte(test.funcName), 0644) + err = os.WriteFile(filepath.Join(tmpDir, "template", "foo.txt.tmpl"), []byte(test.funcName), 0o644) require.NoError(t, err) - err = os.WriteFile(filepath.Join(tmpDir, "databricks_template_schema.json"), []byte("{}"), 0644) + err = os.WriteFile(filepath.Join(tmpDir, "databricks_template_schema.json"), []byte("{}"), 0o644) require.NoError(t, err) // Run bundle init. - RequireSuccessfulRun(t, "bundle", "init", tmpDir, "--output-dir", tmpDir2) + testcli.RequireSuccessfulRun(t, ctx, "bundle", "init", tmpDir, "--output-dir", tmpDir2) // Assert that the helper function was correctly computed. - assertLocalFileContents(t, filepath.Join(tmpDir2, "foo.txt"), test.expected) + contents := testutil.ReadFile(t, filepath.Join(tmpDir2, "foo.txt")) + assert.Contains(t, contents, test.expected) } } diff --git a/internal/bundle/job_metadata_test.go b/integration/bundle/job_metadata_test.go similarity index 83% rename from internal/bundle/job_metadata_test.go rename to integration/bundle/job_metadata_test.go index 21f1086ae..f470b37da 100644 --- a/internal/bundle/job_metadata_test.go +++ b/integration/bundle/job_metadata_test.go @@ -1,4 +1,4 @@ -package bundle +package bundle_test import ( "context" @@ -10,36 +10,32 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/metadata" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/google/uuid" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccJobsMetadataFile(t *testing.T) { +func TestJobsMetadataFile(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() - bundleRoot, err := initTestTemplate(t, ctx, "job_metadata", map[string]any{ + bundleRoot := initTestTemplate(t, ctx, "job_metadata", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) // deploy bundle - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) // Cleanup the deployed bundle t.Cleanup(func() { - err = destroyBundle(t, ctx, bundleRoot) - require.NoError(t, err) + destroyBundle(t, ctx, bundleRoot) }) // assert job 1 is created diff --git a/internal/bundle/local_state_staleness_test.go b/integration/bundle/local_state_staleness_test.go similarity index 71% rename from internal/bundle/local_state_staleness_test.go rename to integration/bundle/local_state_staleness_test.go index d11234667..c02a38bb1 100644 --- a/internal/bundle/local_state_staleness_test.go +++ b/integration/bundle/local_state_staleness_test.go @@ -1,12 +1,11 @@ -package bundle +package bundle_test import ( "context" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go/listing" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/google/uuid" @@ -14,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func 
TestAccLocalStateStaleness(t *testing.T) { +func TestLocalStateStaleness(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) w := wt.W @@ -25,19 +24,17 @@ func TestAccLocalStateStaleness(t *testing.T) { // Because of deploy (2), the locally cached state of bundle instance A should be stale. // Then for deploy (3), it must use the remote state over the stale local state. - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() uniqueId := uuid.New().String() initialize := func() string { - root, err := initTestTemplate(t, ctx, "basic", map[string]any{ + root := initTestTemplate(t, ctx, "basic", map[string]any{ "unique_id": uniqueId, "node_type_id": nodeTypeId, "spark_version": defaultSparkVersion, }) - require.NoError(t, err) t.Cleanup(func() { - err = destroyBundle(t, ctx, root) - require.NoError(t, err) + destroyBundle(t, ctx, root) }) return root @@ -49,16 +46,13 @@ func TestAccLocalStateStaleness(t *testing.T) { bundleB := initialize() // 1) Deploy bundle A - err = deployBundle(t, ctx, bundleA) - require.NoError(t, err) + deployBundle(t, ctx, bundleA) // 2) Deploy bundle B - err = deployBundle(t, ctx, bundleB) - require.NoError(t, err) + deployBundle(t, ctx, bundleB) // 3) Deploy bundle A again - err = deployBundle(t, ctx, bundleA) - require.NoError(t, err) + deployBundle(t, ctx, bundleA) // Assert that there is only a single job in the workspace corresponding to this bundle. iter := w.Jobs.List(context.Background(), jobs.ListJobsRequest{ diff --git a/integration/bundle/main_test.go b/integration/bundle/main_test.go new file mode 100644 index 000000000..1c44d0aaf --- /dev/null +++ b/integration/bundle/main_test.go @@ -0,0 +1,13 @@ +package bundle_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/bundle/python_wheel_test.go b/integration/bundle/python_wheel_test.go similarity index 70% rename from internal/bundle/python_wheel_test.go rename to integration/bundle/python_wheel_test.go index 846f14177..a90642ecc 100644 --- a/internal/bundle/python_wheel_test.go +++ b/integration/bundle/python_wheel_test.go @@ -1,9 +1,8 @@ -package bundle +package bundle_test import ( "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/env" @@ -11,22 +10,20 @@ import ( "github.com/stretchr/testify/require" ) -func runPythonWheelTest(t *testing.T, templateName string, sparkVersion string, pythonWheelWrapper bool) { +func runPythonWheelTest(t *testing.T, templateName, sparkVersion string, pythonWheelWrapper bool) { ctx, _ := acc.WorkspaceTest(t) - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID") - bundleRoot, err := initTestTemplate(t, ctx, templateName, map[string]any{ + bundleRoot := initTestTemplate(t, ctx, templateName, map[string]any{ "node_type_id": nodeTypeId, "unique_id": uuid.New().String(), "spark_version": sparkVersion, "python_wheel_wrapper": pythonWheelWrapper, "instance_pool_id": instancePoolId, }) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -45,18 +42,18 @@ func runPythonWheelTest(t *testing.T, templateName string, sparkVersion string, require.Contains(t, out, "['my_test_code', 'param1', 'param2']") } -func TestAccPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) { +func TestPythonWheelTaskDeployAndRunWithoutWrapper(t *testing.T) { runPythonWheelTest(t, "python_wheel_task", "13.3.x-snapshot-scala2.12", false) } -func TestAccPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) { +func TestPythonWheelTaskDeployAndRunWithWrapper(t *testing.T) { runPythonWheelTest(t, "python_wheel_task", "12.2.x-scala2.12", true) } -func TestAccPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) { +func TestPythonWheelTaskDeployAndRunOnInteractiveCluster(t *testing.T) { _, wt := acc.WorkspaceTest(t) - if testutil.IsAWSCloud(wt.T) { + if testutil.IsAWSCloud(wt) { t.Skip("Skipping test for AWS cloud because it is not permitted to create clusters") } diff --git a/internal/bundle/spark_jar_test.go b/integration/bundle/spark_jar_test.go similarity index 79% rename from internal/bundle/spark_jar_test.go rename to integration/bundle/spark_jar_test.go index 4b469617c..98acb75ac 100644 --- a/internal/bundle/spark_jar_test.go +++ b/integration/bundle/spark_jar_test.go @@ -1,10 +1,9 @@ -package bundle +package bundle_test import ( "context" "testing" - "github.com/databricks/cli/internal" "github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/env" @@ -12,12 +11,11 @@ import ( "github.com/stretchr/testify/require" ) -func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion string, artifactPath string) { - cloudEnv := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - nodeTypeId := internal.GetNodeTypeId(cloudEnv) +func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion, artifactPath string) { + nodeTypeId := testutil.GetCloud(t).NodeTypeID() tmpDir := t.TempDir() 
instancePoolId := env.Get(ctx, "TEST_INSTANCE_POOL_ID") - bundleRoot, err := initTestTemplateWithBundleRoot(t, ctx, "spark_jar_task", map[string]any{ + bundleRoot := initTestTemplateWithBundleRoot(t, ctx, "spark_jar_task", map[string]any{ "node_type_id": nodeTypeId, "unique_id": uuid.New().String(), "spark_version": sparkVersion, @@ -25,10 +23,8 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion strin "artifact_path": artifactPath, "instance_pool_id": instancePoolId, }, tmpDir) - require.NoError(t, err) - err = deployBundle(t, ctx, bundleRoot) - require.NoError(t, err) + deployBundle(t, ctx, bundleRoot) t.Cleanup(func() { destroyBundle(t, ctx, bundleRoot) @@ -41,7 +37,7 @@ func runSparkJarTestCommon(t *testing.T, ctx context.Context, sparkVersion strin func runSparkJarTestFromVolume(t *testing.T, sparkVersion string) { ctx, wt := acc.UcWorkspaceTest(t) - volumePath := internal.TemporaryUcVolume(t, wt.W) + volumePath := acc.TemporaryVolume(wt) ctx = env.Set(ctx, "DATABRICKS_BUNDLE_TARGET", "volume") runSparkJarTestCommon(t, ctx, sparkVersion, volumePath) } @@ -52,8 +48,7 @@ func runSparkJarTestFromWorkspace(t *testing.T, sparkVersion string) { runSparkJarTestCommon(t, ctx, sparkVersion, "n/a") } -func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) { - internal.GetEnvOrSkipTest(t, "CLOUD_ENV") +func TestSparkJarTaskDeployAndRunOnVolumes(t *testing.T) { testutil.RequireJDK(t, context.Background(), "1.8.0") // Failure on earlier DBR versions: @@ -76,8 +71,7 @@ func TestAccSparkJarTaskDeployAndRunOnVolumes(t *testing.T) { } } -func TestAccSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) { - internal.GetEnvOrSkipTest(t, "CLOUD_ENV") +func TestSparkJarTaskDeployAndRunOnWorkspace(t *testing.T) { testutil.RequireJDK(t, context.Background(), "1.8.0") // Failure on earlier DBR versions: diff --git a/internal/testdata/init/field-does-not-exist/databricks_template_schema.json b/integration/bundle/testdata/init/field-does-not-exist/databricks_template_schema.json similarity index 100% rename from internal/testdata/init/field-does-not-exist/databricks_template_schema.json rename to integration/bundle/testdata/init/field-does-not-exist/databricks_template_schema.json diff --git a/internal/testdata/init/field-does-not-exist/template/bar.tmpl b/integration/bundle/testdata/init/field-does-not-exist/template/bar.tmpl similarity index 100% rename from internal/testdata/init/field-does-not-exist/template/bar.tmpl rename to integration/bundle/testdata/init/field-does-not-exist/template/bar.tmpl diff --git a/internal/bundle/validate_test.go b/integration/bundle/validate_test.go similarity index 90% rename from internal/bundle/validate_test.go rename to integration/bundle/validate_test.go index 18da89e4c..2dd8ada67 100644 --- a/internal/bundle/validate_test.go +++ b/integration/bundle/validate_test.go @@ -1,8 +1,9 @@ -package bundle +package bundle_test import ( "context" "encoding/json" + "path/filepath" "testing" "github.com/databricks/cli/internal/testutil" @@ -12,11 +13,9 @@ import ( "github.com/stretchr/testify/require" ) -func TestAccBundleValidate(t *testing.T) { - testutil.GetEnvOrSkipTest(t, "CLOUD_ENV") - +func TestBundleValidate(t *testing.T) { tmpDir := t.TempDir() - testutil.WriteFile(t, + testutil.WriteFile(t, filepath.Join(tmpDir, "databricks.yml"), ` bundle: name: "foobar" @@ -33,7 +32,7 @@ resources: inner_loop: name: inner loop -`, tmpDir, "databricks.yml") +`) ctx := context.Background() stdout, err := validateBundle(t, ctx, tmpDir) diff --git 
a/integration/cmd/alerts/alerts_test.go b/integration/cmd/alerts/alerts_test.go new file mode 100644 index 000000000..ca1719813 --- /dev/null +++ b/integration/cmd/alerts/alerts_test.go @@ -0,0 +1,15 @@ +package alerts_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testcli" + "github.com/stretchr/testify/assert" +) + +func TestAlertsCreateErrWhenNoArguments(t *testing.T) { + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "alerts-legacy", "create") + assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error()) +} diff --git a/integration/cmd/alerts/main_test.go b/integration/cmd/alerts/main_test.go new file mode 100644 index 000000000..6987ade02 --- /dev/null +++ b/integration/cmd/alerts/main_test.go @@ -0,0 +1,13 @@ +package alerts_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/integration/cmd/api/api_test.go b/integration/cmd/api/api_test.go new file mode 100644 index 000000000..4cb9b1737 --- /dev/null +++ b/integration/cmd/api/api_test.go @@ -0,0 +1,56 @@ +package api_test + +import ( + "context" + "encoding/json" + "fmt" + "path" + "path/filepath" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + _ "github.com/databricks/cli/cmd/api" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" +) + +func TestApiGet(t *testing.T) { + ctx := context.Background() + + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "api", "get", "/api/2.0/preview/scim/v2/Me") + + // Deserialize SCIM API response. + var out map[string]any + err := json.Unmarshal(stdout.Bytes(), &out) + require.NoError(t, err) + + // Assert that the output somewhat makes sense for the SCIM API. + assert.Equal(t, true, out["active"]) + assert.NotNil(t, out["id"]) +} + +func TestApiPost(t *testing.T) { + ctx := context.Background() + + if testutil.GetCloud(t) == testutil.GCP { + t.Skip("DBFS REST API is disabled on gcp") + } + + dbfsPath := path.Join("/tmp/databricks/integration", testutil.RandomName("api-post")) + requestPath := filepath.Join(t.TempDir(), "body.json") + testutil.WriteFile(t, requestPath, fmt.Sprintf(`{ + "path": "%s" + }`, dbfsPath)) + + // Post to mkdir + { + testcli.RequireSuccessfulRun(t, ctx, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/mkdirs") + } + + // Post to delete + { + testcli.RequireSuccessfulRun(t, ctx, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/delete") + } +} diff --git a/integration/cmd/api/main_test.go b/integration/cmd/api/main_test.go new file mode 100644 index 000000000..70d021790 --- /dev/null +++ b/integration/cmd/api/main_test.go @@ -0,0 +1,13 @@ +package api_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/auth_describe_test.go b/integration/cmd/auth/describe_test.go similarity index 74% rename from internal/auth_describe_test.go rename to integration/cmd/auth/describe_test.go index 90b5d6801..41288dce6 100644 --- a/internal/auth_describe_test.go +++ b/integration/cmd/auth/describe_test.go @@ -1,18 +1,20 @@ -package internal +package auth_test import ( "context" "fmt" "testing" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/databricks-sdk-go" "github.com/stretchr/testify/require" ) func TestAuthDescribeSuccess(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + t.Skipf("Skipping because of https://github.com/databricks/cli/issues/2010") - stdout, _ := RequireSuccessfulRun(t, "auth", "describe") + ctx := context.Background() + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "auth", "describe") outStr := stdout.String() w, err := databricks.NewWorkspaceClient(&databricks.Config{}) @@ -31,9 +33,10 @@ func TestAuthDescribeSuccess(t *testing.T) { } func TestAuthDescribeFailure(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + t.Skipf("Skipping because of https://github.com/databricks/cli/issues/2010") - stdout, _ := RequireSuccessfulRun(t, "auth", "describe", "--profile", "nonexistent") + ctx := context.Background() + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "auth", "describe", "--profile", "nonexistent") outStr := stdout.String() require.NotEmpty(t, outStr) diff --git a/integration/cmd/auth/main_test.go b/integration/cmd/auth/main_test.go new file mode 100644 index 000000000..97b1d740b --- /dev/null +++ b/integration/cmd/auth/main_test.go @@ -0,0 +1,13 @@ +package auth_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/integration/cmd/clusters/clusters_test.go b/integration/cmd/clusters/clusters_test.go new file mode 100644 index 000000000..4cc6cb658 --- /dev/null +++ b/integration/cmd/clusters/clusters_test.go @@ -0,0 +1,63 @@ +package clusters_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/databricks-sdk-go/listing" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestClustersList(t *testing.T) { + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "clusters", "list") + outStr := stdout.String() + assert.Contains(t, outStr, "ID") + assert.Contains(t, outStr, "Name") + assert.Contains(t, outStr, "State") + assert.Equal(t, "", stderr.String()) + + idRegExp := regexp.MustCompile(`[0-9]{4}\-[0-9]{6}-[a-z0-9]{8}`) + clusterId := idRegExp.FindString(outStr) + assert.NotEmpty(t, clusterId) +} + +func TestClustersGet(t *testing.T) { + ctx := context.Background() + clusterId := findValidClusterID(t) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "clusters", "get", clusterId) + outStr := stdout.String() + assert.Contains(t, outStr, fmt.Sprintf(`"cluster_id":"%s"`, clusterId)) + assert.Equal(t, "", stderr.String()) +} + +func TestClusterCreateErrorWhenNoArguments(t *testing.T) { + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "clusters", "create") + assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") +} + +// findValidClusterID lists clusters in the workspace to find a valid cluster ID. +func findValidClusterID(t *testing.T) string { + ctx, wt := acc.WorkspaceTest(t) + it := wt.W.Clusters.List(ctx, compute.ListClustersRequest{ + FilterBy: &compute.ListClustersFilterBy{ + ClusterSources: []compute.ClusterSource{ + compute.ClusterSourceApi, + compute.ClusterSourceUi, + }, + }, + }) + + clusterIDs, err := listing.ToSliceN(ctx, it, 1) + require.NoError(t, err) + require.Len(t, clusterIDs, 1) + + return clusterIDs[0].ClusterId +} diff --git a/integration/cmd/clusters/main_test.go b/integration/cmd/clusters/main_test.go new file mode 100644 index 000000000..ccd5660e7 --- /dev/null +++ b/integration/cmd/clusters/main_test.go @@ -0,0 +1,13 @@ +package clusters_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/fs_cat_test.go b/integration/cmd/fs/cat_test.go similarity index 57% rename from internal/fs_cat_test.go rename to integration/cmd/fs/cat_test.go index 6292aef18..b0f99ae4e 100644 --- a/internal/fs_cat_test.go +++ b/integration/cmd/fs/cat_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -7,13 +7,14 @@ import ( "strings" "testing" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/filer" - "github.com/databricks/databricks-sdk-go" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccFsCat(t *testing.T) { +func TestFsCat(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -22,18 +23,20 @@ func TestAccFsCat(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) + err := f.Write(context.Background(), "hello.txt", strings.NewReader("abcd"), filer.CreateParentDirectories) require.NoError(t, err) - stdout, stderr := RequireSuccessfulRun(t, "fs", "cat", path.Join(tmpDir, "hello.txt")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "cat", path.Join(tmpDir, "hello.txt")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "abcd", stdout.String()) }) } } -func TestAccFsCatOnADir(t *testing.T) { +func TestFsCatOnADir(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -42,17 +45,19 @@ func TestAccFsCatOnADir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) + err := f.Mkdir(context.Background(), "dir1") require.NoError(t, err) - _, _, err = RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "dir1")) + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "cat", path.Join(tmpDir, "dir1")) assert.ErrorAs(t, err, &filer.NotAFile{}) }) } } -func TestAccFsCatOnNonExistentFile(t *testing.T) { +func TestFsCatOnNonExistentFile(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -61,36 +66,32 @@ func TestAccFsCatOnNonExistentFile(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() _, tmpDir := tc.setupFiler(t) - _, _, err := RequireErrorRun(t, "fs", "cat", path.Join(tmpDir, "non-existent-file")) + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "cat", path.Join(tmpDir, "non-existent-file")) assert.ErrorIs(t, err, fs.ErrNotExist) }) } } -func TestAccFsCatForDbfsInvalidScheme(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - _, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file") +func TestFsCatForDbfsInvalidScheme(t *testing.T) { + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "cat", "dab:/non-existent-file") assert.ErrorContains(t, err, "invalid scheme: dab") } -func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) - - tmpDir := TemporaryDbfsDir(t, w) +func TestFsCatDoesNotSupportOutputModeJson(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W + tmpDir := acc.TemporaryDbfsDir(wt, "fs-cat-") f, err := filer.NewDbfsClient(w, tmpDir) require.NoError(t, err) err = f.Write(ctx, "hello.txt", strings.NewReader("abc")) require.NoError(t, err) - _, _, err = RequireErrorRun(t, "fs", "cat", "dbfs:"+path.Join(tmpDir, "hello.txt"), 
"--output=json") + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "cat", "dbfs:"+path.Join(tmpDir, "hello.txt"), "--output=json") assert.ErrorContains(t, err, "json output not supported") } diff --git a/internal/completer_test.go b/integration/cmd/fs/completion_test.go similarity index 72% rename from internal/completer_test.go rename to integration/cmd/fs/completion_test.go index b2c936886..88ce2fcc1 100644 --- a/internal/completer_test.go +++ b/integration/cmd/fs/completion_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -7,6 +7,7 @@ import ( "testing" _ "github.com/databricks/cli/cmd/fs" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -17,11 +18,12 @@ func setupCompletionFile(t *testing.T, f filer.Filer) { require.NoError(t, err) } -func TestAccFsCompletion(t *testing.T) { +func TestFsCompletion(t *testing.T) { + ctx := context.Background() f, tmpDir := setupDbfsFiler(t) setupCompletionFile(t, f) - stdout, _ := RequireSuccessfulRun(t, "__complete", "fs", "ls", tmpDir+"/") + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "__complete", "fs", "ls", tmpDir+"/") expectedOutput := fmt.Sprintf("%s/dir1/\n:2\n", tmpDir) assert.Equal(t, expectedOutput, stdout.String()) } diff --git a/internal/fs_cp_test.go b/integration/cmd/fs/cp_test.go similarity index 76% rename from internal/fs_cp_test.go rename to integration/cmd/fs/cp_test.go index b69735bc0..76aef7acf 100644 --- a/internal/fs_cp_test.go +++ b/integration/cmd/fs/cp_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -10,6 +10,8 @@ import ( "strings" "testing" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -61,8 +63,8 @@ func assertTargetDir(t *testing.T, ctx context.Context, f filer.Filer) { type cpTest struct { name string - setupSource func(*testing.T) (filer.Filer, string) - setupTarget func(*testing.T) (filer.Filer, string) + setupSource func(testutil.TestingT) (filer.Filer, string) + setupTarget func(testutil.TestingT) (filer.Filer, string) } func copyTests() []cpTest { @@ -120,7 +122,7 @@ func copyTests() []cpTest { } } -func TestAccFsCpDir(t *testing.T) { +func TestFsCpDir(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -129,18 +131,19 @@ func TestAccFsCpDir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) - RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive") + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", sourceDir, targetDir, "--recursive") assertTargetDir(t, context.Background(), targetFiler) }) } } -func TestAccFsCpFileToFile(t *testing.T) { +func TestFsCpFileToFile(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -149,18 +152,19 @@ func TestAccFsCpFileToFile(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceFile(t, context.Background(), sourceFiler) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), path.Join(targetDir, "bar.txt")) + testcli.RequireSuccessfulRun(t, ctx, 
"fs", "cp", path.Join(sourceDir, "foo.txt"), path.Join(targetDir, "bar.txt")) assertTargetFile(t, context.Background(), targetFiler, "bar.txt") }) } } -func TestAccFsCpFileToDir(t *testing.T) { +func TestFsCpFileToDir(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -169,18 +173,19 @@ func TestAccFsCpFileToDir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceFile(t, context.Background(), sourceFiler) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "foo.txt"), targetDir) + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", path.Join(sourceDir, "foo.txt"), targetDir) assertTargetFile(t, context.Background(), targetFiler, "foo.txt") }) } } -func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) { +func TestFsCpFileToDirForWindowsPaths(t *testing.T) { if runtime.GOOS != "windows" { t.Skip("Skipping test on non-windows OS") } @@ -192,11 +197,11 @@ func TestAccFsCpFileToDirForWindowsPaths(t *testing.T) { windowsPath := filepath.Join(filepath.FromSlash(sourceDir), "foo.txt") - RequireSuccessfulRun(t, "fs", "cp", windowsPath, targetDir) + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", windowsPath, targetDir) assertTargetFile(t, ctx, targetFiler, "foo.txt") } -func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) { +func TestFsCpDirToDirFileNotOverwritten(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -205,6 +210,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -213,7 +219,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive") + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", sourceDir, targetDir, "--recursive") assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten") assertFileContent(t, context.Background(), targetFiler, "query.sql", "SELECT 1") assertFileContent(t, context.Background(), targetFiler, "pyNb.py", "# Databricks notebook source\nprint(123)") @@ -221,7 +227,7 @@ func TestAccFsCpDirToDirFileNotOverwritten(t *testing.T) { } } -func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) { +func TestFsCpFileToDirFileNotOverwritten(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -230,6 +236,7 @@ func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -238,13 +245,13 @@ func TestAccFsCpFileToDirFileNotOverwritten(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c")) + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), 
path.Join(targetDir, "a/b/c")) assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "this should not be overwritten") }) } } -func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) { +func TestFsCpFileToFileFileNotOverwritten(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -253,6 +260,7 @@ func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -261,13 +269,13 @@ func TestAccFsCpFileToFileFileNotOverwritten(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/dontoverwrite.txt", strings.NewReader("this should not be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/dontoverwrite.txt")) + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/dontoverwrite.txt")) assertFileContent(t, context.Background(), targetFiler, "a/b/c/dontoverwrite.txt", "this should not be overwritten") }) } } -func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) { +func TestFsCpDirToDirWithOverwriteFlag(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -276,6 +284,7 @@ func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -284,13 +293,13 @@ func TestAccFsCpDirToDirWithOverwriteFlag(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", sourceDir, targetDir, "--recursive", "--overwrite") + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", sourceDir, targetDir, "--recursive", "--overwrite") assertTargetDir(t, context.Background(), targetFiler) }) } } -func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) { +func TestFsCpFileToFileWithOverwriteFlag(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -299,6 +308,7 @@ func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -307,13 +317,13 @@ func TestAccFsCpFileToFileWithOverwriteFlag(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/overwritten.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/overwritten.txt"), "--overwrite") + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c/overwritten.txt"), "--overwrite") assertFileContent(t, context.Background(), targetFiler, "a/b/c/overwritten.txt", "hello, world\n") }) } } -func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) { +func TestFsCpFileToDirWithOverwriteFlag(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ 
-322,6 +332,7 @@ func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -330,13 +341,13 @@ func TestAccFsCpFileToDirWithOverwriteFlag(t *testing.T) { err := targetFiler.Write(context.Background(), "a/b/c/hello.txt", strings.NewReader("this should be overwritten"), filer.CreateParentDirectories) require.NoError(t, err) - RequireSuccessfulRun(t, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c"), "--overwrite") + testcli.RequireSuccessfulRun(t, ctx, "fs", "cp", path.Join(sourceDir, "a/b/c/hello.txt"), path.Join(targetDir, "a/b/c"), "--overwrite") assertFileContent(t, context.Background(), targetFiler, "a/b/c/hello.txt", "hello, world\n") }) } } -func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) { +func TestFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -345,23 +356,23 @@ func TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() _, tmpDir := tc.setupFiler(t) - _, _, err := RequireErrorRun(t, "fs", "cp", path.Join(tmpDir), path.Join(tmpDir, "foobar")) + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "cp", path.Join(tmpDir), path.Join(tmpDir, "foobar")) r := regexp.MustCompile("source path .* is a directory. Please specify the --recursive flag") assert.Regexp(t, r, err.Error()) }) } } -func TestAccFsCpErrorsOnInvalidScheme(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - _, _, err := RequireErrorRun(t, "fs", "cp", "dbfs:/a", "https:/b") +func TestFsCpErrorsOnInvalidScheme(t *testing.T) { + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "cp", "dbfs:/a", "https:/b") assert.Equal(t, "invalid scheme: https", err.Error()) } -func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) { +func TestFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) { t.Parallel() for _, testCase := range copyTests() { @@ -370,6 +381,7 @@ func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() sourceFiler, sourceDir := tc.setupSource(t) targetFiler, targetDir := tc.setupTarget(t) setupSourceDir(t, context.Background(), sourceFiler) @@ -378,7 +390,7 @@ func TestAccFsCpSourceIsDirectoryButTargetIsFile(t *testing.T) { err := targetFiler.Write(context.Background(), "my_target", strings.NewReader("I'll block any attempts to recursively copy"), filer.CreateParentDirectories) require.NoError(t, err) - _, _, err = RequireErrorRun(t, "fs", "cp", sourceDir, path.Join(targetDir, "my_target"), "--recursive") + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "cp", sourceDir, path.Join(targetDir, "my_target"), "--recursive") assert.Error(t, err) }) } diff --git a/integration/cmd/fs/helpers_test.go b/integration/cmd/fs/helpers_test.go new file mode 100644 index 000000000..da4fd48cf --- /dev/null +++ b/integration/cmd/fs/helpers_test.go @@ -0,0 +1,44 @@ +package fs_test + +import ( + "os" + "path" + "path/filepath" + + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testutil" + + "github.com/databricks/cli/libs/filer" + "github.com/stretchr/testify/require" +) + +func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) { + 
tmp := t.TempDir() + f, err := filer.NewLocalClient(tmp) + require.NoError(t, err) + + return f, path.Join(filepath.ToSlash(tmp)) +} + +func setupDbfsFiler(t testutil.TestingT) (filer.Filer, string) { + _, wt := acc.WorkspaceTest(t) + + tmpdir := acc.TemporaryDbfsDir(wt) + f, err := filer.NewDbfsClient(wt.W, tmpdir) + require.NoError(t, err) + return f, path.Join("dbfs:/", tmpdir) +} + +func setupUcVolumesFiler(t testutil.TestingT) (filer.Filer, string) { + _, wt := acc.WorkspaceTest(t) + + if os.Getenv("TEST_METASTORE_ID") == "" { + t.Skip("Skipping tests that require a UC Volume when metastore id is not set.") + } + + tmpdir := acc.TemporaryVolume(wt) + f, err := filer.NewFilesClient(wt.W, tmpdir) + require.NoError(t, err) + + return f, path.Join("dbfs:/", tmpdir) +} diff --git a/internal/fs_ls_test.go b/integration/cmd/fs/ls_test.go similarity index 73% rename from internal/fs_ls_test.go rename to integration/cmd/fs/ls_test.go index 994a4a425..58e776d8a 100644 --- a/internal/fs_ls_test.go +++ b/integration/cmd/fs/ls_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -10,6 +10,8 @@ import ( "testing" _ "github.com/databricks/cli/cmd/fs" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -17,7 +19,7 @@ import ( type fsTest struct { name string - setupFiler func(t *testing.T) (filer.Filer, string) + setupFiler func(t testutil.TestingT) (filer.Filer, string) } var fsTests = []fsTest{ @@ -38,7 +40,7 @@ func setupLsFiles(t *testing.T, f filer.Filer) { require.NoError(t, err) } -func TestAccFsLs(t *testing.T) { +func TestFsLs(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -47,10 +49,11 @@ func TestAccFsLs(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) setupLsFiles(t, f) - stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json") + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "ls", tmpDir, "--output=json") assert.Equal(t, "", stderr.String()) var parsedStdout []map[string]any @@ -71,7 +74,7 @@ func TestAccFsLs(t *testing.T) { } } -func TestAccFsLsWithAbsolutePaths(t *testing.T) { +func TestFsLsWithAbsolutePaths(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -80,10 +83,11 @@ func TestAccFsLsWithAbsolutePaths(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) setupLsFiles(t, f) - stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json", "--absolute") + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "ls", tmpDir, "--output=json", "--absolute") assert.Equal(t, "", stderr.String()) var parsedStdout []map[string]any @@ -104,7 +108,7 @@ func TestAccFsLsWithAbsolutePaths(t *testing.T) { } } -func TestAccFsLsOnFile(t *testing.T) { +func TestFsLsOnFile(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -112,17 +116,19 @@ func TestAccFsLsOnFile(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + + ctx := context.Background() f, tmpDir := tc.setupFiler(t) setupLsFiles(t, f) - _, _, err := RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "a", "hello.txt"), "--output=json") + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "ls", path.Join(tmpDir, "a", "hello.txt"), "--output=json") assert.Regexp(t, regexp.MustCompile("not a 
directory: .*/a/hello.txt"), err.Error()) assert.ErrorAs(t, err, &filer.NotADirectory{}) }) } } -func TestAccFsLsOnEmptyDir(t *testing.T) { +func TestFsLsOnEmptyDir(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -131,9 +137,10 @@ func TestAccFsLsOnEmptyDir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() _, tmpDir := tc.setupFiler(t) - stdout, stderr := RequireSuccessfulRun(t, "fs", "ls", tmpDir, "--output=json") + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "ls", tmpDir, "--output=json") assert.Equal(t, "", stderr.String()) var parsedStdout []map[string]any err := json.Unmarshal(stdout.Bytes(), &parsedStdout) @@ -145,7 +152,7 @@ func TestAccFsLsOnEmptyDir(t *testing.T) { } } -func TestAccFsLsForNonexistingDir(t *testing.T) { +func TestFsLsForNonexistingDir(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -154,20 +161,20 @@ func TestAccFsLsForNonexistingDir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() _, tmpDir := tc.setupFiler(t) - _, _, err := RequireErrorRun(t, "fs", "ls", path.Join(tmpDir, "nonexistent"), "--output=json") + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "ls", path.Join(tmpDir, "nonexistent"), "--output=json") assert.ErrorIs(t, err, fs.ErrNotExist) assert.Regexp(t, regexp.MustCompile("no such directory: .*/nonexistent"), err.Error()) }) } } -func TestAccFsLsWithoutScheme(t *testing.T) { +func TestFsLsWithoutScheme(t *testing.T) { t.Parallel() - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - _, _, err := RequireErrorRun(t, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json") + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "ls", "/path-without-a-dbfs-scheme", "--output=json") assert.ErrorIs(t, err, fs.ErrNotExist) } diff --git a/integration/cmd/fs/main_test.go b/integration/cmd/fs/main_test.go new file mode 100644 index 000000000..b9402f0b2 --- /dev/null +++ b/integration/cmd/fs/main_test.go @@ -0,0 +1,13 @@ +package fs_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/fs_mkdir_test.go b/integration/cmd/fs/mkdir_test.go similarity index 75% rename from internal/fs_mkdir_test.go rename to integration/cmd/fs/mkdir_test.go index 9191f6143..f332bb526 100644 --- a/internal/fs_mkdir_test.go +++ b/integration/cmd/fs/mkdir_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -7,12 +7,13 @@ import ( "strings" "testing" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccFsMkdir(t *testing.T) { +func TestFsMkdir(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -21,10 +22,11 @@ func TestAccFsMkdir(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) // create directory "a" - stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "mkdir", path.Join(tmpDir, "a")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) @@ -37,7 +39,7 @@ func TestAccFsMkdir(t *testing.T) { } } -func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) { +func TestFsMkdirCreatesIntermediateDirectories(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -46,10 +48,11 @@ func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) // create directory "a/b/c" - stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a", "b", "c")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "mkdir", path.Join(tmpDir, "a", "b", "c")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) @@ -74,7 +77,7 @@ func TestAccFsMkdirCreatesIntermediateDirectories(t *testing.T) { } } -func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { +func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -83,6 +86,7 @@ func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) // create directory "a" @@ -90,19 +94,20 @@ func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { require.NoError(t, err) // assert run is successful without any errors - stdout, stderr := RequireSuccessfulRun(t, "fs", "mkdir", path.Join(tmpDir, "a")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "mkdir", path.Join(tmpDir, "a")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) }) } } -func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { +func TestFsMkdirWhenFileExistsAtPath(t *testing.T) { t.Parallel() t.Run("dbfs", func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := setupDbfsFiler(t) // create file "hello" @@ -110,7 +115,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { require.NoError(t, err) // assert mkdir fails - _, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello")) + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "mkdir", path.Join(tmpDir, "hello")) // Different cloud providers or cloud configurations return different errors. 
regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$|(^|: )"The specified path already exists.".*$`) @@ -120,6 +125,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { t.Run("uc-volumes", func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := setupUcVolumesFiler(t) // create file "hello" @@ -127,7 +133,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { require.NoError(t, err) // assert mkdir fails - _, _, err = RequireErrorRun(t, "fs", "mkdir", path.Join(tmpDir, "hello")) + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "mkdir", path.Join(tmpDir, "hello")) assert.ErrorAs(t, err, &filer.FileAlreadyExistsError{}) }) diff --git a/internal/fs_rm_test.go b/integration/cmd/fs/rm_test.go similarity index 77% rename from internal/fs_rm_test.go rename to integration/cmd/fs/rm_test.go index e86f5713b..018c7920e 100644 --- a/internal/fs_rm_test.go +++ b/integration/cmd/fs/rm_test.go @@ -1,4 +1,4 @@ -package internal +package fs_test import ( "context" @@ -7,12 +7,13 @@ import ( "strings" "testing" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccFsRmFile(t *testing.T) { +func TestFsRmFile(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -22,6 +23,7 @@ func TestAccFsRmFile(t *testing.T) { t.Parallel() // Create a file + ctx := context.Background() f, tmpDir := tc.setupFiler(t) err := f.Write(context.Background(), "hello.txt", strings.NewReader("abcd"), filer.CreateParentDirectories) require.NoError(t, err) @@ -31,7 +33,7 @@ func TestAccFsRmFile(t *testing.T) { assert.NoError(t, err) // Run rm command - stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "hello.txt")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "rm", path.Join(tmpDir, "hello.txt")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) @@ -42,7 +44,7 @@ func TestAccFsRmFile(t *testing.T) { } } -func TestAccFsRmEmptyDir(t *testing.T) { +func TestFsRmEmptyDir(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -52,6 +54,7 @@ func TestAccFsRmEmptyDir(t *testing.T) { t.Parallel() // Create a directory + ctx := context.Background() f, tmpDir := tc.setupFiler(t) err := f.Mkdir(context.Background(), "a") require.NoError(t, err) @@ -61,7 +64,7 @@ func TestAccFsRmEmptyDir(t *testing.T) { assert.NoError(t, err) // Run rm command - stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "rm", path.Join(tmpDir, "a")) assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) @@ -72,7 +75,7 @@ func TestAccFsRmEmptyDir(t *testing.T) { } } -func TestAccFsRmNonEmptyDirectory(t *testing.T) { +func TestFsRmNonEmptyDirectory(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -82,6 +85,7 @@ func TestAccFsRmNonEmptyDirectory(t *testing.T) { t.Parallel() // Create a directory + ctx := context.Background() f, tmpDir := tc.setupFiler(t) err := f.Mkdir(context.Background(), "a") require.NoError(t, err) @@ -95,14 +99,14 @@ func TestAccFsRmNonEmptyDirectory(t *testing.T) { assert.NoError(t, err) // Run rm command - _, _, err = RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "a")) + _, _, err = testcli.RequireErrorRun(t, ctx, "fs", "rm", path.Join(tmpDir, "a")) 
assert.ErrorIs(t, err, fs.ErrInvalid) assert.ErrorAs(t, err, &filer.DirectoryNotEmptyError{}) }) } } -func TestAccFsRmForNonExistentFile(t *testing.T) { +func TestFsRmForNonExistentFile(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -111,17 +115,17 @@ func TestAccFsRmForNonExistentFile(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() _, tmpDir := tc.setupFiler(t) // Expect error if file does not exist - _, _, err := RequireErrorRun(t, "fs", "rm", path.Join(tmpDir, "does-not-exist")) + _, _, err := testcli.RequireErrorRun(t, ctx, "fs", "rm", path.Join(tmpDir, "does-not-exist")) assert.ErrorIs(t, err, fs.ErrNotExist) }) } - } -func TestAccFsRmDirRecursively(t *testing.T) { +func TestFsRmDirRecursively(t *testing.T) { t.Parallel() for _, testCase := range fsTests { @@ -130,6 +134,7 @@ func TestAccFsRmDirRecursively(t *testing.T) { t.Run(tc.name, func(t *testing.T) { t.Parallel() + ctx := context.Background() f, tmpDir := tc.setupFiler(t) // Create a directory @@ -145,7 +150,7 @@ func TestAccFsRmDirRecursively(t *testing.T) { assert.NoError(t, err) // Run rm command - stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", path.Join(tmpDir, "a"), "--recursive") + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "fs", "rm", path.Join(tmpDir, "a"), "--recursive") assert.Equal(t, "", stderr.String()) assert.Equal(t, "", stdout.String()) diff --git a/integration/cmd/jobs/jobs_test.go b/integration/cmd/jobs/jobs_test.go new file mode 100644 index 000000000..b6bcfc5b3 --- /dev/null +++ b/integration/cmd/jobs/jobs_test.go @@ -0,0 +1,24 @@ +package jobs_test + +import ( + "context" + "encoding/json" + "fmt" + "testing" + + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestCreateJob(t *testing.T) { + testutil.Require(t, testutil.Azure) + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "jobs", "create", "--json", "@testdata/create_job_without_workers.json", "--log-level=debug") + assert.Empty(t, stderr.String()) + var output map[string]int + err := json.Unmarshal(stdout.Bytes(), &output) + require.NoError(t, err) + testcli.RequireSuccessfulRun(t, ctx, "jobs", "delete", fmt.Sprint(output["job_id"]), "--log-level=debug") +} diff --git a/integration/cmd/jobs/main_test.go b/integration/cmd/jobs/main_test.go new file mode 100644 index 000000000..46369a526 --- /dev/null +++ b/integration/cmd/jobs/main_test.go @@ -0,0 +1,13 @@ +package jobs_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/testjsons/create_job_without_workers.json b/integration/cmd/jobs/testdata/create_job_without_workers.json similarity index 100% rename from internal/testjsons/create_job_without_workers.json rename to integration/cmd/jobs/testdata/create_job_without_workers.json diff --git a/integration/cmd/main_test.go b/integration/cmd/main_test.go new file mode 100644 index 000000000..a1a5586b6 --- /dev/null +++ b/integration/cmd/main_test.go @@ -0,0 +1,13 @@ +package cmd_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. 
+// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/integration/cmd/repos/main_test.go b/integration/cmd/repos/main_test.go new file mode 100644 index 000000000..7eaa174bc --- /dev/null +++ b/integration/cmd/repos/main_test.go @@ -0,0 +1,13 @@ +package repos_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/repos_test.go b/integration/cmd/repos/repos_test.go similarity index 55% rename from internal/repos_test.go rename to integration/cmd/repos/repos_test.go index 1ad0e8775..b5ad120d6 100644 --- a/internal/repos_test.go +++ b/integration/cmd/repos/repos_test.go @@ -1,4 +1,4 @@ -package internal +package repos_test import ( "context" @@ -6,6 +6,9 @@ import ( "strconv" "testing" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/workspace" @@ -13,10 +16,12 @@ import ( "github.com/stretchr/testify/require" ) +const repoUrl = "https://github.com/databricks/databricks-empty-ide-project.git" + func synthesizeTemporaryRepoPath(t *testing.T, w *databricks.WorkspaceClient, ctx context.Context) string { me, err := w.CurrentUser.Me(ctx) require.NoError(t, err) - repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-integration-")) + repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-integration-")) // Cleanup if repo was created at specified path. t.Cleanup(func() { @@ -43,15 +48,12 @@ func createTemporaryRepo(t *testing.T, w *databricks.WorkspaceClient, ctx contex return repoInfo.Id, repoPath } -func TestAccReposCreateWithProvider(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestReposCreateWithProvider(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoPath := synthesizeTemporaryRepoPath(t, w, ctx) - _, stderr := RequireSuccessfulRun(t, "repos", "create", repoUrl, "gitHub", "--path", repoPath) + _, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "create", repoUrl, "gitHub", "--path", repoPath) assert.Equal(t, "", stderr.String()) // Confirm the repo was created. @@ -60,15 +62,12 @@ func TestAccReposCreateWithProvider(t *testing.T) { assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType) } -func TestAccReposCreateWithoutProvider(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestReposCreateWithoutProvider(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoPath := synthesizeTemporaryRepoPath(t, w, ctx) - _, stderr := RequireSuccessfulRun(t, "repos", "create", repoUrl, "--path", repoPath) + _, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "create", repoUrl, "--path", repoPath) assert.Equal(t, "", stderr.String()) // Confirm the repo was created. 
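The repos hunks below repeat the mechanical migration used across this diff: drop the CLOUD_ENV log-and-skip line, replace the hand-built workspace client with the acc.WorkspaceTest fixture, and thread ctx through testcli. A minimal sketch of the resulting test shape, using the same imports as the surrounding file; the command and assertions here are illustrative, not taken from this diff:

func TestExampleShape(t *testing.T) {
	// acc.WorkspaceTest returns a context plus a fixture wrapping the SDK client.
	ctx, wt := acc.WorkspaceTest(t)
	w := wt.W // *databricks.WorkspaceClient for direct API calls

	// CLI invocations take the context explicitly and return captured output.
	stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "current-user", "me")
	assert.NotEmpty(t, stdout.String())
	assert.Equal(t, "", stderr.String())

	// SDK calls reuse the same authenticated client.
	_, err := w.CurrentUser.Me(ctx)
	require.NoError(t, err)
}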
@@ -77,90 +76,78 @@ func TestAccReposCreateWithoutProvider(t *testing.T) { assert.Equal(t, workspace.ObjectTypeRepo, oi.ObjectType) } -func TestAccReposGet(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestReposGet(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoId, repoPath := createTemporaryRepo(t, w, ctx) // Get by ID - byIdOutput, stderr := RequireSuccessfulRun(t, "repos", "get", strconv.FormatInt(repoId, 10), "--output=json") + byIdOutput, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "get", strconv.FormatInt(repoId, 10), "--output=json") assert.Equal(t, "", stderr.String()) // Get by path - byPathOutput, stderr := RequireSuccessfulRun(t, "repos", "get", repoPath, "--output=json") + byPathOutput, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "get", repoPath, "--output=json") assert.Equal(t, "", stderr.String()) // Output should be the same assert.Equal(t, byIdOutput.String(), byPathOutput.String()) // Get by path fails - _, stderr, err = RequireErrorRun(t, "repos", "get", repoPath+"-doesntexist", "--output=json") + _, stderr, err := testcli.RequireErrorRun(t, ctx, "repos", "get", repoPath+"-doesntexist", "--output=json") assert.ErrorContains(t, err, "failed to look up repo") // Get by path resolves to something other than a repo - _, stderr, err = RequireErrorRun(t, "repos", "get", "/Repos", "--output=json") + _, stderr, err = testcli.RequireErrorRun(t, ctx, "repos", "get", "/Repos", "--output=json") assert.ErrorContains(t, err, "is not a repo") } -func TestAccReposUpdate(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestReposUpdate(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoId, repoPath := createTemporaryRepo(t, w, ctx) // Update by ID - byIdOutput, stderr := RequireSuccessfulRun(t, "repos", "update", strconv.FormatInt(repoId, 10), "--branch", "ide") + byIdOutput, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "update", strconv.FormatInt(repoId, 10), "--branch", "ide") assert.Equal(t, "", stderr.String()) // Update by path - byPathOutput, stderr := RequireSuccessfulRun(t, "repos", "update", repoPath, "--branch", "ide") + byPathOutput, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "update", repoPath, "--branch", "ide") assert.Equal(t, "", stderr.String()) // Output should be the same assert.Equal(t, byIdOutput.String(), byPathOutput.String()) } -func TestAccReposDeleteByID(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestReposDeleteByID(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoId, _ := createTemporaryRepo(t, w, ctx) // Delete by ID - stdout, stderr := RequireSuccessfulRun(t, "repos", "delete", strconv.FormatInt(repoId, 10)) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "delete", strconv.FormatInt(repoId, 10)) assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stderr.String()) // Check it was actually deleted - _, err = w.Repos.GetByRepoId(ctx, repoId) + _, err := w.Repos.GetByRepoId(ctx, repoId) assert.True(t, apierr.IsMissing(err), err) } -func TestAccReposDeleteByPath(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() 
- require.NoError(t, err) +func TestReposDeleteByPath(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W repoId, repoPath := createTemporaryRepo(t, w, ctx) // Delete by path - stdout, stderr := RequireSuccessfulRun(t, "repos", "delete", repoPath) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "repos", "delete", repoPath) assert.Equal(t, "", stdout.String()) assert.Equal(t, "", stderr.String()) // Check it was actually deleted - _, err = w.Repos.GetByRepoId(ctx, repoId) + _, err := w.Repos.GetByRepoId(ctx, repoId) assert.True(t, apierr.IsMissing(err), err) } diff --git a/integration/cmd/secrets/main_test.go b/integration/cmd/secrets/main_test.go new file mode 100644 index 000000000..a44d30671 --- /dev/null +++ b/integration/cmd/secrets/main_test.go @@ -0,0 +1,13 @@ +package secrets_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/secrets_test.go b/integration/cmd/secrets/secrets_test.go similarity index 78% rename from internal/secrets_test.go rename to integration/cmd/secrets/secrets_test.go index 59e5d6150..4dd133c25 100644 --- a/internal/secrets_test.go +++ b/integration/cmd/secrets/secrets_test.go @@ -1,4 +1,4 @@ -package internal +package secrets_test import ( "context" @@ -7,18 +7,21 @@ import ( "testing" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestSecretsCreateScopeErrWhenNoArguments(t *testing.T) { - _, _, err := RequireErrorRun(t, "secrets", "create-scope") + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "secrets", "create-scope") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") } func temporarySecretScope(ctx context.Context, t *acc.WorkspaceT) string { - scope := acc.RandomName("cli-acc-") + scope := testutil.RandomName("cli-acc-") err := t.W.Secrets.CreateScope(ctx, workspace.CreateScope{ Scope: scope, }) @@ -61,13 +64,13 @@ func assertSecretBytesValue(t *acc.WorkspaceT, scope, key string, expected []byt assert.Equal(t, expected, decoded) } -func TestAccSecretsPutSecretStringValue(tt *testing.T) { +func TestSecretsPutSecretStringValue(tt *testing.T) { ctx, t := acc.WorkspaceTest(tt) scope := temporarySecretScope(ctx, t) key := "test-key" value := "test-value\nwith-newlines\n" - stdout, stderr := RequireSuccessfulRun(t.T, "secrets", "put-secret", scope, key, "--string-value", value) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "secrets", "put-secret", scope, key, "--string-value", value) assert.Empty(t, stdout) assert.Empty(t, stderr) @@ -75,13 +78,13 @@ func TestAccSecretsPutSecretStringValue(tt *testing.T) { assertSecretBytesValue(t, scope, key, []byte(value)) } -func TestAccSecretsPutSecretBytesValue(tt *testing.T) { +func TestSecretsPutSecretBytesValue(tt *testing.T) { ctx, t := acc.WorkspaceTest(tt) scope := temporarySecretScope(ctx, t) key := "test-key" value := []byte{0x00, 0x01, 0x02, 0x03} - stdout, stderr := RequireSuccessfulRun(t.T, "secrets", "put-secret", scope, key, "--bytes-value", string(value)) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "secrets", "put-secret", scope, key, "--bytes-value", 
string(value)) assert.Empty(t, stdout) assert.Empty(t, stderr) diff --git a/integration/cmd/storage_credentials/main_test.go b/integration/cmd/storage_credentials/main_test.go new file mode 100644 index 000000000..14d00d966 --- /dev/null +++ b/integration/cmd/storage_credentials/main_test.go @@ -0,0 +1,13 @@ +package storage_credentials_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/integration/cmd/storage_credentials/storage_credentials_test.go b/integration/cmd/storage_credentials/storage_credentials_test.go new file mode 100644 index 000000000..73727a875 --- /dev/null +++ b/integration/cmd/storage_credentials/storage_credentials_test.go @@ -0,0 +1,21 @@ +package storage_credentials_test + +import ( + "testing" + + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestStorageCredentialsListRendersResponse(t *testing.T) { + ctx, _ := acc.WorkspaceTest(t) + + // Skip unless TEST_METASTORE_ID is set; listing storage credentials requires a metastore assigned to the workspace. + t.Log(testutil.GetEnvOrSkipTest(t, "TEST_METASTORE_ID")) + + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "storage-credentials", "list") + assert.NotEmpty(t, stdout) + assert.Empty(t, stderr) +} diff --git a/integration/cmd/sync/main_test.go b/integration/cmd/sync/main_test.go new file mode 100644 index 000000000..8d9f3ca25 --- /dev/null +++ b/integration/cmd/sync/main_test.go @@ -0,0 +1,13 @@ +package sync_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests.
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/sync_test.go b/integration/cmd/sync/sync_test.go similarity index 86% rename from internal/sync_test.go rename to integration/cmd/sync/sync_test.go index 6f8b1827b..077a06079 100644 --- a/internal/sync_test.go +++ b/integration/cmd/sync/sync_test.go @@ -1,4 +1,4 @@ -package internal +package sync_test import ( "context" @@ -15,7 +15,9 @@ import ( "testing" "time" - _ "github.com/databricks/cli/cmd/sync" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/sync" "github.com/databricks/cli/libs/testfile" @@ -36,7 +38,7 @@ var ( func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Context) (localRoot, remoteRoot string) { me, err := wsc.CurrentUser.Me(ctx) require.NoError(t, err) - repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("empty-repo-sync-integration-")) + repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("empty-repo-sync-integration-")) repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ Path: repoPath, @@ -63,19 +65,19 @@ func setupRepo(t *testing.T, wsc *databricks.WorkspaceClient, ctx context.Contex type syncTest struct { t *testing.T - c *cobraTestRunner + c *testcli.Runner w *databricks.WorkspaceClient f filer.Filer localRoot string remoteRoot string } -func setupSyncTest(t *testing.T, args ...string) *syncTest { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) +func setupSyncTest(t *testing.T, args ...string) (context.Context, *syncTest) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W - w := databricks.Must(databricks.NewWorkspaceClient()) localRoot := t.TempDir() - remoteRoot := TemporaryWorkspaceDir(t, w) + remoteRoot := acc.TemporaryWorkspaceDir(wt, "sync-") f, err := filer.NewWorkspaceFilesClient(w, remoteRoot) require.NoError(t, err) @@ -88,10 +90,10 @@ func setupSyncTest(t *testing.T, args ...string) *syncTest { "json", }, args...) - c := NewCobraTestRunner(t, args...) + c := testcli.NewRunner(t, ctx, args...) c.RunBackground() - return &syncTest{ + return ctx, &syncTest{ t: t, c: c, w: w, @@ -109,7 +111,7 @@ func (s *syncTest) waitForCompletionMarker() { select { case <-ctx.Done(): s.t.Fatal("timed out waiting for sync to complete") - case line := <-s.c.stdoutLines: + case line := <-s.c.StdoutLines: var event sync.EventBase err := json.Unmarshal([]byte(line), &event) require.NoError(s.t, err) @@ -145,7 +147,7 @@ func (a *syncTest) remoteDirContent(ctx context.Context, relativeDir string, exp } } -func (a *syncTest) remoteFileContent(ctx context.Context, relativePath string, expectedContent string) { +func (a *syncTest) remoteFileContent(ctx context.Context, relativePath, expectedContent string) { filePath := path.Join(a.remoteRoot, relativePath) // Remove leading "/" so we can use it in the URL. 
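setupSyncTest now returns the context together with a *syncTest whose testcli.Runner was started in the background with JSON output; assertions such as waitForCompletionMarker above drain the runner's exported StdoutLines channel and decode each line as a sync event. A condensed sketch of that consumption loop follows; the Type field and the sync.EventTypeComplete constant are assumptions inferred from the unmarshal into sync.EventBase, since the comparison itself falls outside the hunk:

func waitForComplete(t *testing.T, r *testcli.Runner) {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	for {
		select {
		case <-ctx.Done():
			t.Fatal("timed out waiting for sync to complete")
		case line := <-r.StdoutLines:
			// One JSON-encoded sync event per stdout line.
			var event sync.EventBase
			require.NoError(t, json.Unmarshal([]byte(line), &event))
			if event.Type == sync.EventTypeComplete { // assumed field and constant
				return
			}
		}
	}
}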
@@ -181,7 +183,7 @@ func (a *syncTest) touchFile(ctx context.Context, path string) { require.NoError(a.t, err) } -func (a *syncTest) objectType(ctx context.Context, relativePath string, expected string) { +func (a *syncTest) objectType(ctx context.Context, relativePath, expected string) { path := path.Join(a.remoteRoot, relativePath) a.c.Eventually(func() bool { @@ -193,7 +195,7 @@ func (a *syncTest) objectType(ctx context.Context, relativePath string, expected }, 30*time.Second, 5*time.Second) } -func (a *syncTest) language(ctx context.Context, relativePath string, expected string) { +func (a *syncTest) language(ctx context.Context, relativePath, expected string) { path := path.Join(a.remoteRoot, relativePath) a.c.Eventually(func() bool { @@ -228,9 +230,8 @@ func (a *syncTest) snapshotContains(files []string) { assert.Equal(a.t, len(files), len(s.LastModifiedTimes)) } -func TestAccSyncFullFileSync(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--full", "--watch") +func TestSyncFullFileSync(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--full", "--watch") // .gitignore is created by the sync process to enforce .databricks is not synced assertSync.waitForCompletionMarker() @@ -260,9 +261,8 @@ func TestAccSyncFullFileSync(t *testing.T) { assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) } -func TestAccSyncIncrementalFileSync(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncIncrementalFileSync(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // .gitignore is created by the sync process to enforce .databricks is not synced assertSync.waitForCompletionMarker() @@ -294,9 +294,8 @@ func TestAccSyncIncrementalFileSync(t *testing.T) { assertSync.snapshotContains(append(repoFiles, ".gitignore")) } -func TestAccSyncNestedFolderSync(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncNestedFolderSync(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // .gitignore is created by the sync process to enforce .databricks is not synced assertSync.waitForCompletionMarker() @@ -322,9 +321,8 @@ func TestAccSyncNestedFolderSync(t *testing.T) { assertSync.snapshotContains(append(repoFiles, ".gitignore")) } -func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // .gitignore is created by the sync process to enforce .databricks is not synced assertSync.waitForCompletionMarker() @@ -355,9 +353,8 @@ func TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory(t *testing.T) { assertSync.remoteExists(ctx, "dir1") } -func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // .gitignore is created by the sync process to enforce .databricks is not synced assertSync.waitForCompletionMarker() @@ -391,9 +388,8 @@ func TestAccSyncNestedSpacePlusAndHashAreEscapedSync(t *testing.T) { // // In the above scenario sync should delete the empty folder and add foo to the remote // file system -func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") 
+func TestSyncIncrementalFileOverwritesFolder(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // create foo/bar.txt localFilePath := filepath.Join(assertSync.localRoot, "foo/bar.txt") @@ -421,9 +417,8 @@ func TestAccSyncIncrementalFileOverwritesFolder(t *testing.T) { assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo")) } -func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncIncrementalSyncPythonNotebookToFile(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // create python notebook localFilePath := filepath.Join(assertSync.localRoot, "foo.py") @@ -452,9 +447,8 @@ func TestAccSyncIncrementalSyncPythonNotebookToFile(t *testing.T) { assertSync.snapshotContains(append(repoFiles, ".gitignore")) } -func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncIncrementalSyncFileToPythonNotebook(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // create vanilla python file localFilePath := filepath.Join(assertSync.localRoot, "foo.py") @@ -476,9 +470,8 @@ func TestAccSyncIncrementalSyncFileToPythonNotebook(t *testing.T) { assertSync.snapshotContains(append(repoFiles, ".gitignore", "foo.py")) } -func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) { - ctx := context.Background() - assertSync := setupSyncTest(t, "--watch") +func TestSyncIncrementalSyncPythonNotebookDelete(t *testing.T) { + ctx, assertSync := setupSyncTest(t, "--watch") // create python notebook localFilePath := filepath.Join(assertSync.localRoot, "foo.py") @@ -498,17 +491,15 @@ func TestAccSyncIncrementalSyncPythonNotebookDelete(t *testing.T) { assertSync.remoteDirContent(ctx, "", append(repoFiles, ".gitignore")) } -func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - wsc := databricks.Must(databricks.NewWorkspaceClient()) - ctx := context.Background() +func TestSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + wsc := wt.W me, err := wsc.CurrentUser.Me(ctx) require.NoError(t, err) // Hypothetical repo path doesn't exist. - nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("doesnt-exist-")) + nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("doesnt-exist-")) err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil) assert.ErrorContains(t, err, " does not exist; please create it first") @@ -518,11 +509,10 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { assert.ErrorContains(t, err, " does not exist; please create it first") } -func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) +func TestSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + wsc := wt.W - wsc := databricks.Must(databricks.NewWorkspaceClient()) - ctx := context.Background() _, remoteRepoPath := setupRepo(t, wsc, ctx) // Repo itself is usable. 
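The recurring edit in the tests above swaps hand-rolled setup (databricks.Must(databricks.NewWorkspaceClient()) plus context.Background()) for the acc.WorkspaceTest helper. A minimal sketch of the resulting pattern, with a hypothetical test name; signatures are taken from their uses in this diff:

package example_test // hypothetical package

import (
	"testing"

	"github.com/databricks/cli/internal/acc"
	"github.com/stretchr/testify/require"
)

func TestAgainstWorkspace(t *testing.T) {
	// acc.WorkspaceTest returns a context plus a wrapper carrying the
	// SDK client; the CLOUD_ENV gate now lives in the package's TestMain.
	ctx, wt := acc.WorkspaceTest(t)
	wsc := wt.W

	me, err := wsc.CurrentUser.Me(ctx)
	require.NoError(t, err)
	t.Logf("running as %s", me.UserName)
}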
@@ -540,15 +530,14 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { require.Equal(t, workspace.ObjectTypeDirectory, info.ObjectType) } -func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) +func TestSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + wsc := wt.W - wsc := databricks.Must(databricks.NewWorkspaceClient()) - ctx := context.Background() me, err := wsc.CurrentUser.Me(ctx) require.NoError(t, err) - remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("ensure-path-exists-test-")) + remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName("ensure-path-exists-test-")) err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me) assert.NoError(t, err) diff --git a/internal/unknown_command_test.go b/integration/cmd/unknown_command_test.go similarity index 63% rename from internal/unknown_command_test.go rename to integration/cmd/unknown_command_test.go index 62b84027f..fd87a77ff 100644 --- a/internal/unknown_command_test.go +++ b/integration/cmd/unknown_command_test.go @@ -1,13 +1,16 @@ -package internal +package cmd_test import ( + "context" "testing" + "github.com/databricks/cli/internal/testcli" assert "github.com/databricks/cli/libs/dyn/dynassert" ) func TestUnknownCommand(t *testing.T) { - stdout, stderr, err := RequireErrorRun(t, "unknown-command") + ctx := context.Background() + stdout, stderr, err := testcli.RequireErrorRun(t, ctx, "unknown-command") assert.Error(t, err, "unknown command", `unknown command "unknown-command" for "databricks"`) assert.Equal(t, "", stdout.String()) diff --git a/integration/cmd/version/main_test.go b/integration/cmd/version/main_test.go new file mode 100644 index 000000000..4aa5e046a --- /dev/null +++ b/integration/cmd/version/main_test.go @@ -0,0 +1,13 @@ +package version_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/version_test.go b/integration/cmd/version/version_test.go similarity index 66% rename from internal/version_test.go rename to integration/cmd/version/version_test.go index 7dba63cd8..b12974d69 100644 --- a/internal/version_test.go +++ b/integration/cmd/version/version_test.go @@ -1,36 +1,42 @@ -package internal +package version_test import ( + "context" "encoding/json" "fmt" "testing" "github.com/databricks/cli/internal/build" + "github.com/databricks/cli/internal/testcli" "github.com/stretchr/testify/assert" ) var expectedVersion = fmt.Sprintf("Databricks CLI v%s\n", build.GetInfo().Version) func TestVersionFlagShort(t *testing.T) { - stdout, stderr := RequireSuccessfulRun(t, "-v") + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "-v") assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, "", stderr.String()) } func TestVersionFlagLong(t *testing.T) { - stdout, stderr := RequireSuccessfulRun(t, "--version") + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "--version") assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, "", stderr.String()) } func TestVersionCommand(t *testing.T) { - stdout, stderr := RequireSuccessfulRun(t, "version") + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "version") assert.Equal(t, expectedVersion, stdout.String()) assert.Equal(t, "", stderr.String()) } func TestVersionCommandWithJSONOutput(t *testing.T) { - stdout, stderr := RequireSuccessfulRun(t, "version", "--output", "json") + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "version", "--output", "json") assert.NotEmpty(t, stdout.String()) assert.Equal(t, "", stderr.String()) diff --git a/integration/cmd/workspace/main_test.go b/integration/cmd/workspace/main_test.go new file mode 100644 index 000000000..40d140eac --- /dev/null +++ b/integration/cmd/workspace/main_test.go @@ -0,0 +1,13 @@ +package workspace_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
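The version tests above show the new testcli entry points, which take an explicit context as their second argument. A short sketch of driving the CLI through them, with assertions mirroring the tests in this diff; the TestMain that follows wires the workspace package into the same CLOUD_ENV gate:

package example_test // hypothetical package

import (
	"context"
	"testing"

	"github.com/databricks/cli/internal/testcli"
	"github.com/stretchr/testify/assert"
)

func TestVersionViaTestcli(t *testing.T) {
	ctx := context.Background()
	// RequireSuccessfulRun runs the given CLI args and fails the test on a
	// non-zero exit; RequireErrorRun is the counterpart for commands that
	// are expected to fail.
	stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "version")
	assert.NotEmpty(t, stdout.String())
	assert.Equal(t, "", stderr.String())
}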
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/testdata/import_dir/a/b/c/file-b b/integration/cmd/workspace/testdata/import_dir/a/b/c/file-b similarity index 100% rename from internal/testdata/import_dir/a/b/c/file-b rename to integration/cmd/workspace/testdata/import_dir/a/b/c/file-b diff --git a/internal/testdata/import_dir/file-a b/integration/cmd/workspace/testdata/import_dir/file-a similarity index 100% rename from internal/testdata/import_dir/file-a rename to integration/cmd/workspace/testdata/import_dir/file-a diff --git a/internal/testdata/import_dir/jupyterNotebook.ipynb b/integration/cmd/workspace/testdata/import_dir/jupyterNotebook.ipynb similarity index 100% rename from internal/testdata/import_dir/jupyterNotebook.ipynb rename to integration/cmd/workspace/testdata/import_dir/jupyterNotebook.ipynb diff --git a/internal/testdata/import_dir/pyNotebook.py b/integration/cmd/workspace/testdata/import_dir/pyNotebook.py similarity index 100% rename from internal/testdata/import_dir/pyNotebook.py rename to integration/cmd/workspace/testdata/import_dir/pyNotebook.py diff --git a/internal/testdata/import_dir/rNotebook.r b/integration/cmd/workspace/testdata/import_dir/rNotebook.r similarity index 100% rename from internal/testdata/import_dir/rNotebook.r rename to integration/cmd/workspace/testdata/import_dir/rNotebook.r diff --git a/internal/testdata/import_dir/scalaNotebook.scala b/integration/cmd/workspace/testdata/import_dir/scalaNotebook.scala similarity index 100% rename from internal/testdata/import_dir/scalaNotebook.scala rename to integration/cmd/workspace/testdata/import_dir/scalaNotebook.scala diff --git a/internal/testdata/import_dir/sqlNotebook.sql b/integration/cmd/workspace/testdata/import_dir/sqlNotebook.sql similarity index 100% rename from internal/testdata/import_dir/sqlNotebook.sql rename to integration/cmd/workspace/testdata/import_dir/sqlNotebook.sql diff --git a/internal/workspace_test.go b/integration/cmd/workspace/workspace_test.go similarity index 77% rename from internal/workspace_test.go rename to integration/cmd/workspace/workspace_test.go index 445361654..9338f60aa 100644 --- a/internal/workspace_test.go +++ b/integration/cmd/workspace/workspace_test.go @@ -1,4 +1,4 @@ -package internal +package workspace_test import ( "context" @@ -12,17 +12,16 @@ import ( "testing" "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testcli" "github.com/databricks/cli/libs/filer" - "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) -func TestAccWorkspaceList(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - stdout, stderr := RequireSuccessfulRun(t, "workspace", "list", "/") +func TestWorkspaceList(t *testing.T) { + ctx := context.Background() + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "workspace", "list", "/") outStr := stdout.String() assert.Contains(t, outStr, "ID") assert.Contains(t, outStr, "Type") @@ -32,21 +31,22 @@ func TestAccWorkspaceList(t *testing.T) { } func TestWorkpaceListErrorWhenNoArguments(t *testing.T) { - _, _, err := RequireErrorRun(t, "workspace", "list") + ctx := context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "workspace", "list") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") } func TestWorkpaceGetStatusErrorWhenNoArguments(t *testing.T) { - _, _, err := RequireErrorRun(t, "workspace", "get-status") + ctx := 
context.Background() + _, _, err := testcli.RequireErrorRun(t, ctx, "workspace", "get-status") assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") } -func TestAccWorkpaceExportPrintsContents(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) +func TestWorkpaceExportPrintsContents(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W - ctx := context.Background() - w := databricks.Must(databricks.NewWorkspaceClient()) - tmpdir := TemporaryWorkspaceDir(t, w) + tmpdir := acc.TemporaryWorkspaceDir(wt, "workspace-export-") f, err := filer.NewWorkspaceFilesClient(w, tmpdir) require.NoError(t, err) @@ -56,29 +56,30 @@ func TestAccWorkpaceExportPrintsContents(t *testing.T) { require.NoError(t, err) // Run export - stdout, stderr := RequireSuccessfulRun(t, "workspace", "export", path.Join(tmpdir, "file-a")) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(tmpdir, "file-a")) assert.Equal(t, contents, stdout.String()) assert.Equal(t, "", stderr.String()) } func setupWorkspaceImportExportTest(t *testing.T) (context.Context, filer.Filer, string) { ctx, wt := acc.WorkspaceTest(t) + w := wt.W - tmpdir := TemporaryWorkspaceDir(t, wt.W) - f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir) + tmpdir := acc.TemporaryWorkspaceDir(wt, "workspace-import-") + f, err := filer.NewWorkspaceFilesClient(w, tmpdir) require.NoError(t, err) return ctx, f, tmpdir } -func assertLocalFileContents(t *testing.T, path string, content string) { +func assertLocalFileContents(t *testing.T, path, content string) { require.FileExists(t, path) b, err := os.ReadFile(path) require.NoError(t, err) assert.Contains(t, string(b), content) } -func assertFilerFileContents(t *testing.T, ctx context.Context, f filer.Filer, path string, content string) { +func assertFilerFileContents(t *testing.T, ctx context.Context, f filer.Filer, path, content string) { r, err := f.Read(ctx, path) require.NoError(t, err) b, err := io.ReadAll(r) @@ -92,7 +93,7 @@ func assertWorkspaceFileType(t *testing.T, ctx context.Context, f filer.Filer, p assert.Equal(t, fileType, info.Sys().(workspace.ObjectInfo).ObjectType) } -func TestAccExportDir(t *testing.T) { +func TestExportDir(t *testing.T) { ctx, f, sourceDir := setupWorkspaceImportExportTest(t) targetDir := t.TempDir() @@ -124,7 +125,7 @@ func TestAccExportDir(t *testing.T) { }, "\n") // Run Export - stdout, stderr := RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir) + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "workspace", "export-dir", sourceDir, targetDir) assert.Equal(t, expectedLogs, stdout.String()) assert.Equal(t, "", stderr.String()) @@ -137,7 +138,7 @@ func TestAccExportDir(t *testing.T) { assertLocalFileContents(t, filepath.Join(targetDir, "a/b/c/file-b"), "def") } -func TestAccExportDirDoesNotOverwrite(t *testing.T) { +func TestExportDirDoesNotOverwrite(t *testing.T) { ctx, f, sourceDir := setupWorkspaceImportExportTest(t) targetDir := t.TempDir() @@ -152,13 +153,13 @@ func TestAccExportDirDoesNotOverwrite(t *testing.T) { require.NoError(t, err) // Run Export - RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir) + testcli.RequireSuccessfulRun(t, ctx, "workspace", "export-dir", sourceDir, targetDir) // Assert file is not overwritten assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "local content") } -func TestAccExportDirWithOverwriteFlag(t *testing.T) { +func TestExportDirWithOverwriteFlag(t *testing.T) { ctx, f, sourceDir := setupWorkspaceImportExportTest(t) 
targetDir := t.TempDir() @@ -173,15 +174,15 @@ func TestAccExportDirWithOverwriteFlag(t *testing.T) { require.NoError(t, err) // Run Export - RequireSuccessfulRun(t, "workspace", "export-dir", sourceDir, targetDir, "--overwrite") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "export-dir", sourceDir, targetDir, "--overwrite") // Assert file has been overwritten assertLocalFileContents(t, filepath.Join(targetDir, "file-a"), "content from workspace") } -func TestAccImportDir(t *testing.T) { +func TestImportDir(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) - stdout, stderr := RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug") + stdout, stderr := testcli.RequireSuccessfulRun(t, ctx, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--log-level=debug") expectedLogs := strings.Join([]string{ fmt.Sprintf("Importing files from %s", "./testdata/import_dir"), @@ -208,7 +209,7 @@ func TestAccImportDir(t *testing.T) { assertFilerFileContents(t, ctx, workspaceFiler, "jupyterNotebook", "# Databricks notebook source\nprint(\"jupyter\")") } -func TestAccImportDirDoesNotOverwrite(t *testing.T) { +func TestImportDirDoesNotOverwrite(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) var err error @@ -222,7 +223,7 @@ func TestAccImportDirDoesNotOverwrite(t *testing.T) { assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")") - RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir) + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import-dir", "./testdata/import_dir", targetDir) // Assert files are imported assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir") @@ -236,7 +237,7 @@ func TestAccImportDirDoesNotOverwrite(t *testing.T) { assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")") } -func TestAccImportDirWithOverwriteFlag(t *testing.T) { +func TestImportDirWithOverwriteFlag(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) var err error @@ -250,7 +251,7 @@ func TestAccImportDirWithOverwriteFlag(t *testing.T) { assertFilerFileContents(t, ctx, workspaceFiler, "file-a", "old file") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"old notebook\")") - RequireSuccessfulRun(t, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--overwrite") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import-dir", "./testdata/import_dir", targetDir, "--overwrite") // Assert files are imported assertFilerFileContents(t, ctx, workspaceFiler, "a/b/c/file-b", "file-in-dir") @@ -264,7 +265,7 @@ func TestAccImportDirWithOverwriteFlag(t *testing.T) { assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")") } -func TestAccExport(t *testing.T) { +func TestExport(t *testing.T) { ctx, f, sourceDir := setupWorkspaceImportExportTest(t) var err error @@ -272,7 +273,7 @@ func TestAccExport(t *testing.T) { // Export vanilla file err = f.Write(ctx, "file-a", strings.NewReader("abc")) require.NoError(t, err) - stdout, _ := RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a")) + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", 
path.Join(sourceDir, "file-a")) b, err := io.ReadAll(&stdout) require.NoError(t, err) assert.Equal(t, "abc", string(b)) @@ -280,20 +281,20 @@ func TestAccExport(t *testing.T) { // Export python notebook err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source")) require.NoError(t, err) - stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook")) + stdout, _ = testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(sourceDir, "pyNotebook")) b, err = io.ReadAll(&stdout) require.NoError(t, err) assert.Equal(t, "# Databricks notebook source\n", string(b)) // Export python notebook as jupyter - stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER") + stdout, _ = testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER") b, err = io.ReadAll(&stdout) require.NoError(t, err) assert.Contains(t, string(b), `"cells":`, "jupyter notebooks contain the cells field") assert.Contains(t, string(b), `"metadata":`, "jupyter notebooks contain the metadata field") } -func TestAccExportWithFileFlag(t *testing.T) { +func TestExportWithFileFlag(t *testing.T) { ctx, f, sourceDir := setupWorkspaceImportExportTest(t) localTmpDir := t.TempDir() @@ -302,7 +303,7 @@ func TestAccExportWithFileFlag(t *testing.T) { // Export vanilla file err = f.Write(ctx, "file-a", strings.NewReader("abc")) require.NoError(t, err) - stdout, _ := RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "file-a"), "--file", filepath.Join(localTmpDir, "file.txt")) + stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(sourceDir, "file-a"), "--file", filepath.Join(localTmpDir, "file.txt")) b, err := io.ReadAll(&stdout) require.NoError(t, err) // Expect nothing to be printed to stdout @@ -312,14 +313,14 @@ func TestAccExportWithFileFlag(t *testing.T) { // Export python notebook err = f.Write(ctx, "pyNotebook.py", strings.NewReader("# Databricks notebook source")) require.NoError(t, err) - stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--file", filepath.Join(localTmpDir, "pyNb.py")) + stdout, _ = testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--file", filepath.Join(localTmpDir, "pyNb.py")) b, err = io.ReadAll(&stdout) require.NoError(t, err) assert.Equal(t, "", string(b)) assertLocalFileContents(t, filepath.Join(localTmpDir, "pyNb.py"), "# Databricks notebook source\n") // Export python notebook as jupyter - stdout, _ = RequireSuccessfulRun(t, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER", "--file", filepath.Join(localTmpDir, "jupyterNb.ipynb")) + stdout, _ = testcli.RequireSuccessfulRun(t, ctx, "workspace", "export", path.Join(sourceDir, "pyNotebook"), "--format", "JUPYTER", "--file", filepath.Join(localTmpDir, "jupyterNb.ipynb")) b, err = io.ReadAll(&stdout) require.NoError(t, err) assert.Equal(t, "", string(b)) @@ -327,75 +328,75 @@ func TestAccExportWithFileFlag(t *testing.T) { assertLocalFileContents(t, filepath.Join(localTmpDir, "jupyterNb.ipynb"), `"metadata":`) } -func TestAccImportFileUsingContentFormatSource(t *testing.T) { +func TestImportFileUsingContentFormatSource(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) // Content = `print(1)`. 
Uploaded as a notebook by default - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyScript"), + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "pyScript"), "--content", base64.StdEncoding.EncodeToString([]byte("print(1)")), "--language=PYTHON") assertFilerFileContents(t, ctx, workspaceFiler, "pyScript", "print(1)") assertWorkspaceFileType(t, ctx, workspaceFiler, "pyScript", workspace.ObjectTypeNotebook) // Import with content = `# Databricks notebook source\nprint(1)`. Uploaded as a notebook with the content just being print(1) - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNb"), + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "pyNb"), "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--language=PYTHON") assertFilerFileContents(t, ctx, workspaceFiler, "pyNb", "print(1)") assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNb", workspace.ObjectTypeNotebook) } -func TestAccImportFileUsingContentFormatAuto(t *testing.T) { +func TestImportFileUsingContentFormatAuto(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) // Content = `# Databricks notebook source\nprint(1)`. Upload as file if path has no extension. - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source\nprint(1)") assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile) // Content = `# Databricks notebook source\nprint(1)`. Upload as notebook if path has py extension - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), "--content", base64.StdEncoding.EncodeToString([]byte("`# Databricks notebook source\nprint(1)")), "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(1)") assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook) // Content = `print(1)`. 
Upload as file if content is not notebook (even if path has .py extension) - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--content", + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--content", base64.StdEncoding.EncodeToString([]byte("print(1)")), "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "print(1)") assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile) } -func TestAccImportFileFormatSource(t *testing.T) { +func TestImportFileFormatSource(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "pyNotebook"), "--file", "./testdata/import_dir/pyNotebook.py", "--language=PYTHON") assertFilerFileContents(t, ctx, workspaceFiler, "pyNotebook", "# Databricks notebook source\nprint(\"python\")") assertWorkspaceFileType(t, ctx, workspaceFiler, "pyNotebook", workspace.ObjectTypeNotebook) - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala", "--language=SCALA") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala", "--language=SCALA") assertFilerFileContents(t, ctx, workspaceFiler, "scalaNotebook", "// Databricks notebook source\nprintln(\"scala\")") assertWorkspaceFileType(t, ctx, workspaceFiler, "scalaNotebook", workspace.ObjectTypeNotebook) - _, _, err := RequireErrorRun(t, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala") + _, _, err := testcli.RequireErrorRun(t, ctx, "workspace", "import", path.Join(targetDir, "scalaNotebook"), "--file", "./testdata/import_dir/scalaNotebook.scala") assert.ErrorContains(t, err, "The zip file may not be valid or may be an unsupported version. Hint: Objects imported using format=SOURCE are expected to be zip encoded databricks source notebook(s) by default. 
Please specify a language using the --language flag if you are trying to import a single uncompressed notebook") } -func TestAccImportFileFormatAuto(t *testing.T) { +func TestImportFileFormatAuto(t *testing.T) { ctx, workspaceFiler, targetDir := setupWorkspaceImportExportTest(t) // Upload as file if path has no extension - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "py-nb-as-file"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "# Databricks notebook source") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-file", "print(\"python\")") assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-file", workspace.ObjectTypeFile) // Upload as notebook if path has extension - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "py-nb-as-notebook.py"), "--file", "./testdata/import_dir/pyNotebook.py", "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "py-nb-as-notebook", "# Databricks notebook source\nprint(\"python\")") assertWorkspaceFileType(t, ctx, workspaceFiler, "py-nb-as-notebook", workspace.ObjectTypeNotebook) // Upload as file if content is not notebook (even if path has .py extension) - RequireSuccessfulRun(t, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--file", "./testdata/import_dir/file-a", "--format=AUTO") + testcli.RequireSuccessfulRun(t, ctx, "workspace", "import", path.Join(targetDir, "not-a-notebook.py"), "--file", "./testdata/import_dir/file-a", "--format=AUTO") assertFilerFileContents(t, ctx, workspaceFiler, "not-a-notebook.py", "hello, world") assertWorkspaceFileType(t, ctx, workspaceFiler, "not-a-notebook.py", workspace.ObjectTypeFile) } diff --git a/integration/enforce_convention_test.go b/integration/enforce_convention_test.go new file mode 100644 index 000000000..cc822a6a3 --- /dev/null +++ b/integration/enforce_convention_test.go @@ -0,0 +1,116 @@ +package integration + +import ( + "go/parser" + "go/token" + "os" + "path/filepath" + "strings" + "testing" + "text/template" + + "golang.org/x/exp/maps" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +type packageInfo struct { + Name string + Files []string +} + +func enumeratePackages(t *testing.T) map[string]packageInfo { + pkgmap := make(map[string]packageInfo) + err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } + + // Skip files. + if !info.IsDir() { + return nil + } + + // Skip the root directory and the "internal" directory. + if path == "." || strings.HasPrefix(path, "internal") { + return nil + } + + fset := token.NewFileSet() + pkgs, err := parser.ParseDir(fset, path, nil, parser.ParseComments) + require.NoError(t, err) + if len(pkgs) == 0 { + return nil + } + + // Expect one package per directory. + require.Len(t, pkgs, 1, "Directory %s contains more than one package", path) + v := maps.Values(pkgs)[0] + + // Record the package. 
+ pkgmap[path] = packageInfo{ + Name: v.Name, + Files: maps.Keys(v.Files), + } + return nil + }) + require.NoError(t, err) + return pkgmap +} + +// TestEnforcePackageNames checks that all integration test package names use the "_test" suffix. +// We enforce this package name to avoid package name aliasing. +func TestEnforcePackageNames(t *testing.T) { + pkgmap := enumeratePackages(t) + for _, pkg := range pkgmap { + assert.True(t, strings.HasSuffix(pkg.Name, "_test"), "Package name %s does not end with _test", pkg.Name) + } +} + +var mainTestTemplate = template.Must(template.New("main_test").Parse( + `package {{.Name}} + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} +`)) + +func TestEnforceMainTest(t *testing.T) { + pkgmap := enumeratePackages(t) + for dir, pkg := range pkgmap { + found := false + for _, file := range pkg.Files { + if filepath.Base(file) == "main_test.go" { + found = true + break + } + } + + // Expect a "main_test.go" file in each package. + assert.True(t, found, "Directory %s does not contain a main_test.go file", dir) + } +} + +func TestWriteMainTest(t *testing.T) { + t.Skip("Uncomment to write main_test.go files") + + pkgmap := enumeratePackages(t) + for dir, pkg := range pkgmap { + // Write a "main_test.go" file to the package. + // This file is required to run the integration tests. + f, err := os.Create(filepath.Join(dir, "main_test.go")) + require.NoError(t, err) + defer f.Close() + err = mainTestTemplate.Execute(f, pkg) + require.NoError(t, err) + } +} diff --git a/integration/internal/main.go b/integration/internal/main.go new file mode 100644 index 000000000..6d69dcf70 --- /dev/null +++ b/integration/internal/main.go @@ -0,0 +1,20 @@ +package internal + +import ( + "fmt" + "os" + "testing" +) + +// Main is the entry point for integration tests. +// We use this for all integration tests defined in this subtree to ensure +// they are not inadvertently executed when calling `go test ./...`. 
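Main's body follows below: when CLOUD_ENV is unset it prints "CLOUD_ENV is not set, skipping integration tests" and returns without calling m.Run(), so a plain `go test ./...` never touches a live workspace. Every package under integration/ opts in with the same boilerplate, which the convention test above both enforces and can generate; a copy with a hypothetical package name:

package mypkg_test // hypothetical package

import (
	"testing"

	"github.com/databricks/cli/integration/internal"
)

// TestMain is the entrypoint executed by the test runner.
// See [internal.Main] for prerequisites for running integration tests.
func TestMain(m *testing.M) {
	internal.Main(m)
}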
+func Main(m *testing.M) { + value := os.Getenv("CLOUD_ENV") + if value == "" { + fmt.Println("CLOUD_ENV is not set, skipping integration tests") + return + } + + m.Run() +} diff --git a/internal/filer_test.go b/integration/libs/filer/filer_test.go similarity index 89% rename from internal/filer_test.go rename to integration/libs/filer/filer_test.go index 4e6a15671..766f9817b 100644 --- a/internal/filer_test.go +++ b/integration/libs/filer/filer_test.go @@ -1,4 +1,4 @@ -package internal +package filer_test import ( "bytes" @@ -12,6 +12,7 @@ import ( "strings" "testing" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -22,7 +23,7 @@ type filerTest struct { filer.Filer } -func (f filerTest) assertContents(ctx context.Context, name string, contents string) { +func (f filerTest) assertContents(ctx context.Context, name, contents string) { reader, err := f.Read(ctx, name) if !assert.NoError(f, err) { return @@ -39,7 +40,7 @@ func (f filerTest) assertContents(ctx context.Context, name string, contents str assert.Equal(f, contents, body.String()) } -func (f filerTest) assertContentsJupyter(ctx context.Context, name string, language string) { +func (f filerTest) assertContentsJupyter(ctx context.Context, name, language string) { reader, err := f.Read(ctx, name) if !assert.NoError(f, err) { return @@ -116,12 +117,12 @@ func commonFilerRecursiveDeleteTest(t *testing.T, ctx context.Context, f filer.F assert.ErrorAs(t, err, &filer.NoSuchDirectoryError{}) } -func TestAccFilerRecursiveDelete(t *testing.T) { +func TestFilerRecursiveDelete(t *testing.T) { t.Parallel() for _, testCase := range []struct { name string - f func(t *testing.T) (filer.Filer, string) + f func(t testutil.TestingT) (filer.Filer, string) }{ {"local", setupLocalFiler}, {"workspace files", setupWsfsFiler}, @@ -227,12 +228,12 @@ func commonFilerReadWriteTests(t *testing.T, ctx context.Context, f filer.Filer) assert.True(t, errors.Is(err, fs.ErrInvalid)) } -func TestAccFilerReadWrite(t *testing.T) { +func TestFilerReadWrite(t *testing.T) { t.Parallel() for _, testCase := range []struct { name string - f func(t *testing.T) (filer.Filer, string) + f func(t testutil.TestingT) (filer.Filer, string) }{ {"local", setupLocalFiler}, {"workspace files", setupWsfsFiler}, @@ -336,12 +337,12 @@ func commonFilerReadDirTest(t *testing.T, ctx context.Context, f filer.Filer) { assert.False(t, entries[0].IsDir()) } -func TestAccFilerReadDir(t *testing.T) { +func TestFilerReadDir(t *testing.T) { t.Parallel() for _, testCase := range []struct { name string - f func(t *testing.T) (filer.Filer, string) + f func(t testutil.TestingT) (filer.Filer, string) }{ {"local", setupLocalFiler}, {"workspace files", setupWsfsFiler}, @@ -361,7 +362,7 @@ func TestAccFilerReadDir(t *testing.T) { } } -func TestAccFilerWorkspaceNotebook(t *testing.T) { +func TestFilerWorkspaceNotebook(t *testing.T) { t.Parallel() ctx := context.Background() @@ -410,33 +411,33 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) { { name: "pythonJupyterNb.ipynb", nameWithoutExt: "pythonJupyterNb", - content1: readFile(t, "testdata/notebooks/py1.ipynb"), + content1: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"), expected1: "# Databricks notebook source\nprint(1)", - content2: readFile(t, "testdata/notebooks/py2.ipynb"), + content2: testutil.ReadFile(t, "testdata/notebooks/py2.ipynb"), expected2: "# Databricks notebook source\nprint(2)", }, { name: "rJupyterNb.ipynb", 
nameWithoutExt: "rJupyterNb", - content1: readFile(t, "testdata/notebooks/r1.ipynb"), + content1: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"), expected1: "# Databricks notebook source\nprint(1)", - content2: readFile(t, "testdata/notebooks/r2.ipynb"), + content2: testutil.ReadFile(t, "testdata/notebooks/r2.ipynb"), expected2: "# Databricks notebook source\nprint(2)", }, { name: "scalaJupyterNb.ipynb", nameWithoutExt: "scalaJupyterNb", - content1: readFile(t, "testdata/notebooks/scala1.ipynb"), + content1: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"), expected1: "// Databricks notebook source\nprintln(1)", - content2: readFile(t, "testdata/notebooks/scala2.ipynb"), + content2: testutil.ReadFile(t, "testdata/notebooks/scala2.ipynb"), expected2: "// Databricks notebook source\nprintln(2)", }, { name: "sqlJupyterNotebook.ipynb", nameWithoutExt: "sqlJupyterNotebook", - content1: readFile(t, "testdata/notebooks/sql1.ipynb"), + content1: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"), expected1: "-- Databricks notebook source\nselect 1", - content2: readFile(t, "testdata/notebooks/sql2.ipynb"), + content2: testutil.ReadFile(t, "testdata/notebooks/sql2.ipynb"), expected2: "-- Databricks notebook source\nselect 2", }, } @@ -468,10 +469,9 @@ func TestAccFilerWorkspaceNotebook(t *testing.T) { filerTest{t, f}.assertContents(ctx, tc.nameWithoutExt, tc.expected2) }) } - } -func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) { +func TestFilerWorkspaceFilesExtensionsReadDir(t *testing.T) { t.Parallel() files := []struct { @@ -484,13 +484,13 @@ func TestAccFilerWorkspaceFilesExtensionsReadDir(t *testing.T) { {"foo.r", "print('foo')"}, {"foo.scala", "println('foo')"}, {"foo.sql", "SELECT 'foo'"}, - {"py1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")}, + {"py1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")}, {"pyNb.py", "# Databricks notebook source\nprint('first upload'))"}, - {"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")}, + {"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")}, {"rNb.r", "# Databricks notebook source\nprint('first upload'))"}, - {"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")}, + {"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")}, {"scalaNb.scala", "// Databricks notebook source\n println(\"first upload\"))"}, - {"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")}, + {"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")}, {"sqlNb.sql", "-- Databricks notebook source\n SELECT \"first upload\""}, } @@ -555,10 +555,10 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer { }{ {"foo.py", "# Databricks notebook source\nprint('first upload'))"}, {"bar.py", "print('foo')"}, - {"p1.ipynb", readFile(t, "testdata/notebooks/py1.ipynb")}, - {"r1.ipynb", readFile(t, "testdata/notebooks/r1.ipynb")}, - {"scala1.ipynb", readFile(t, "testdata/notebooks/scala1.ipynb")}, - {"sql1.ipynb", readFile(t, "testdata/notebooks/sql1.ipynb")}, + {"p1.ipynb", testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")}, + {"r1.ipynb", testutil.ReadFile(t, "testdata/notebooks/r1.ipynb")}, + {"scala1.ipynb", testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb")}, + {"sql1.ipynb", testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb")}, {"pretender", "not a notebook"}, {"dir/file.txt", "file content"}, {"scala-notebook.scala", "// Databricks notebook source\nprintln('first upload')"}, @@ -575,7 +575,7 @@ func setupFilerWithExtensionsTest(t *testing.T) filer.Filer { return wf } 
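The extension-aware tests that follow all pivot on one contract: a notebook stored in WSFS under the bare name "foo" is only addressable as "foo.<ext>" through the extensions client. A condensed sketch of that contract, assuming the setupWsfsExtensionsFiler helper defined in helpers_test.go later in this diff:

package filer_test

import (
	"context"
	"io/fs"
	"strings"
	"testing"

	"github.com/databricks/cli/internal/testutil"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestNotebookAddressableOnlyWithExtension(t *testing.T) {
	ctx := context.Background()
	wf, _ := setupWsfsExtensionsFiler(t)

	// Write a Python notebook; WSFS stores it under the bare name "foo".
	err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb")))
	require.NoError(t, err)

	// The extension-less name does not resolve through this filer...
	_, err = wf.Read(ctx, "foo")
	assert.ErrorIs(t, err, fs.ErrNotExist)

	// ...but the name with the extension does.
	_, err = wf.Read(ctx, "foo.ipynb")
	assert.NoError(t, err)
}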
-func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) { +func TestFilerWorkspaceFilesExtensionsRead(t *testing.T) { t.Parallel() ctx := context.Background() @@ -612,7 +612,7 @@ func TestAccFilerWorkspaceFilesExtensionsRead(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) { +func TestFilerWorkspaceFilesExtensionsDelete(t *testing.T) { t.Parallel() ctx := context.Background() @@ -661,7 +661,7 @@ func TestAccFilerWorkspaceFilesExtensionsDelete(t *testing.T) { filerTest{t, wf}.assertNotExists(ctx, "dir") } -func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) { +func TestFilerWorkspaceFilesExtensionsStat(t *testing.T) { t.Parallel() ctx := context.Background() @@ -708,7 +708,7 @@ func TestAccFilerWorkspaceFilesExtensionsStat(t *testing.T) { } } -func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) { +func TestWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) { t.Parallel() ctx := context.Background() @@ -723,14 +723,14 @@ func TestAccWorkspaceFilesExtensionsDirectoriesAreNotNotebooks(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) { +func TestWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) { t.Parallel() ctx := context.Background() wf, _ := setupWsfsExtensionsFiler(t) // Create a notebook - err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) + err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"))) require.NoError(t, err) // Reading foo should fail. Even though the WSFS name for the notebook is foo @@ -742,14 +742,14 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotReadAsFiles(t *testing.T) { assert.NoError(t, err) } -func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) { +func TestWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) { t.Parallel() ctx := context.Background() wf, _ := setupWsfsExtensionsFiler(t) // Create a notebook - err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) + err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"))) require.NoError(t, err) // Stating foo should fail. Even though the WSFS name for the notebook is foo @@ -761,14 +761,14 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotStatAsFiles(t *testing.T) { assert.NoError(t, err) } -func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T) { +func TestWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T) { t.Parallel() ctx := context.Background() wf, _ := setupWsfsExtensionsFiler(t) // Create a notebook - err := wf.Write(ctx, "foo.ipynb", strings.NewReader(readFile(t, "testdata/notebooks/py1.ipynb"))) + err := wf.Write(ctx, "foo.ipynb", strings.NewReader(testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"))) require.NoError(t, err) // Deleting foo should fail. Even though the WSFS name for the notebook is foo @@ -780,7 +780,7 @@ func TestAccWorkspaceFilesExtensionsNotebooksAreNotDeletedAsFiles(t *testing.T) assert.NoError(t, err) } -func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) { +func TestWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) { t.Parallel() // Case 1: Writing source notebooks. 
@@ -850,25 +850,25 @@ func TestAccWorkspaceFilesExtensions_ExportFormatIsPreserved(t *testing.T) { language: "python", sourceName: "foo.py", jupyterName: "foo.ipynb", - jupyterContent: readFile(t, "testdata/notebooks/py1.ipynb"), + jupyterContent: testutil.ReadFile(t, "testdata/notebooks/py1.ipynb"), }, { language: "r", sourceName: "foo.r", jupyterName: "foo.ipynb", - jupyterContent: readFile(t, "testdata/notebooks/r1.ipynb"), + jupyterContent: testutil.ReadFile(t, "testdata/notebooks/r1.ipynb"), }, { language: "scala", sourceName: "foo.scala", jupyterName: "foo.ipynb", - jupyterContent: readFile(t, "testdata/notebooks/scala1.ipynb"), + jupyterContent: testutil.ReadFile(t, "testdata/notebooks/scala1.ipynb"), }, { language: "sql", sourceName: "foo.sql", jupyterName: "foo.ipynb", - jupyterContent: readFile(t, "testdata/notebooks/sql1.ipynb"), + jupyterContent: testutil.ReadFile(t, "testdata/notebooks/sql1.ipynb"), }, } { t.Run("jupyter_"+tc.language, func(t *testing.T) { diff --git a/integration/libs/filer/helpers_test.go b/integration/libs/filer/helpers_test.go new file mode 100644 index 000000000..2383ae352 --- /dev/null +++ b/integration/libs/filer/helpers_test.go @@ -0,0 +1,73 @@ +package filer_test + +import ( + "errors" + "net/http" + "os" + "path" + "path/filepath" + + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testutil" + + "github.com/databricks/cli/libs/filer" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/stretchr/testify/require" +) + +func setupLocalFiler(t testutil.TestingT) (filer.Filer, string) { + tmp := t.TempDir() + f, err := filer.NewLocalClient(tmp) + require.NoError(t, err) + + return f, path.Join(filepath.ToSlash(tmp)) +} + +func setupWsfsFiler(t testutil.TestingT) (filer.Filer, string) { + ctx, wt := acc.WorkspaceTest(t) + + tmpdir := acc.TemporaryWorkspaceDir(wt) + f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir) + require.NoError(t, err) + + // Check if we can use this API here, skip test if we cannot. + _, err = f.Read(ctx, "we_use_this_call_to_test_if_this_api_is_enabled") + var aerr *apierr.APIError + if errors.As(err, &aerr) && aerr.StatusCode == http.StatusBadRequest { + t.Skip(aerr.Message) + } + + return f, tmpdir +} + +func setupWsfsExtensionsFiler(t testutil.TestingT) (filer.Filer, string) { + _, wt := acc.WorkspaceTest(t) + + tmpdir := acc.TemporaryWorkspaceDir(wt) + f, err := filer.NewWorkspaceFilesExtensionsClient(wt.W, tmpdir) + require.NoError(t, err) + return f, tmpdir +} + +func setupDbfsFiler(t testutil.TestingT) (filer.Filer, string) { + _, wt := acc.WorkspaceTest(t) + + tmpdir := acc.TemporaryDbfsDir(wt) + f, err := filer.NewDbfsClient(wt.W, tmpdir) + require.NoError(t, err) + return f, path.Join("dbfs:/", tmpdir) +} + +func setupUcVolumesFiler(t testutil.TestingT) (filer.Filer, string) { + _, wt := acc.WorkspaceTest(t) + + if os.Getenv("TEST_METASTORE_ID") == "" { + t.Skip("Skipping tests that require a UC Volume when metastore id is not set.") + } + + tmpdir := acc.TemporaryVolume(wt) + f, err := filer.NewFilesClient(wt.W, tmpdir) + require.NoError(t, err) + + return f, path.Join("dbfs:/", tmpdir) +} diff --git a/integration/libs/filer/main_test.go b/integration/libs/filer/main_test.go new file mode 100644 index 000000000..ca866d952 --- /dev/null +++ b/integration/libs/filer/main_test.go @@ -0,0 +1,13 @@ +package filer_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. 
+// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/testdata/notebooks/py1.ipynb b/integration/libs/filer/testdata/notebooks/py1.ipynb similarity index 100% rename from internal/testdata/notebooks/py1.ipynb rename to integration/libs/filer/testdata/notebooks/py1.ipynb diff --git a/internal/testdata/notebooks/py2.ipynb b/integration/libs/filer/testdata/notebooks/py2.ipynb similarity index 100% rename from internal/testdata/notebooks/py2.ipynb rename to integration/libs/filer/testdata/notebooks/py2.ipynb diff --git a/internal/testdata/notebooks/r1.ipynb b/integration/libs/filer/testdata/notebooks/r1.ipynb similarity index 100% rename from internal/testdata/notebooks/r1.ipynb rename to integration/libs/filer/testdata/notebooks/r1.ipynb diff --git a/internal/testdata/notebooks/r2.ipynb b/integration/libs/filer/testdata/notebooks/r2.ipynb similarity index 100% rename from internal/testdata/notebooks/r2.ipynb rename to integration/libs/filer/testdata/notebooks/r2.ipynb diff --git a/internal/testdata/notebooks/scala1.ipynb b/integration/libs/filer/testdata/notebooks/scala1.ipynb similarity index 100% rename from internal/testdata/notebooks/scala1.ipynb rename to integration/libs/filer/testdata/notebooks/scala1.ipynb diff --git a/internal/testdata/notebooks/scala2.ipynb b/integration/libs/filer/testdata/notebooks/scala2.ipynb similarity index 100% rename from internal/testdata/notebooks/scala2.ipynb rename to integration/libs/filer/testdata/notebooks/scala2.ipynb diff --git a/internal/testdata/notebooks/sql1.ipynb b/integration/libs/filer/testdata/notebooks/sql1.ipynb similarity index 100% rename from internal/testdata/notebooks/sql1.ipynb rename to integration/libs/filer/testdata/notebooks/sql1.ipynb diff --git a/internal/testdata/notebooks/sql2.ipynb b/integration/libs/filer/testdata/notebooks/sql2.ipynb similarity index 100% rename from internal/testdata/notebooks/sql2.ipynb rename to integration/libs/filer/testdata/notebooks/sql2.ipynb diff --git a/internal/git_clone_test.go b/integration/libs/git/git_clone_test.go similarity index 83% rename from internal/git_clone_test.go rename to integration/libs/git/git_clone_test.go index 73c3db105..cbc2d091d 100644 --- a/internal/git_clone_test.go +++ b/integration/libs/git/git_clone_test.go @@ -1,4 +1,4 @@ -package internal +package git_test import ( "context" @@ -10,9 +10,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestAccGitClone(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - +func TestGitClone(t *testing.T) { tmpDir := t.TempDir() ctx := context.Background() var err error @@ -32,9 +30,7 @@ func TestAccGitClone(t *testing.T) { assert.Contains(t, string(b), "ide") } -func TestAccGitCloneOnNonDefaultBranch(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - +func TestGitCloneOnNonDefaultBranch(t *testing.T) { tmpDir := t.TempDir() ctx := context.Background() var err error @@ -53,9 +49,7 @@ func TestAccGitCloneOnNonDefaultBranch(t *testing.T) { assert.Contains(t, string(b), "dais-2022") } -func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - +func TestGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) { tmpDir := t.TempDir() err := git.Clone(context.Background(), "https://github.com/monalisa/doesnot-exist.git", "", tmpDir) diff --git a/internal/git_fetch_test.go b/integration/libs/git/git_fetch_test.go similarity index 69% rename from internal/git_fetch_test.go 
rename to integration/libs/git/git_fetch_test.go index 5dab6be76..e53b5469a 100644 --- a/internal/git_fetch_test.go +++ b/integration/libs/git/git_fetch_test.go @@ -1,10 +1,11 @@ -package internal +package git_test import ( "os" "os/exec" "path" "path/filepath" + "strings" "testing" "github.com/databricks/cli/internal/acc" @@ -14,8 +15,10 @@ import ( "github.com/stretchr/testify/require" ) -const examplesRepoUrl = "https://github.com/databricks/bundle-examples" -const examplesRepoProvider = "gitHub" +const ( + examplesRepoUrl = "https://github.com/databricks/bundle-examples" + examplesRepoProvider = "gitHub" +) func assertFullGitInfo(t *testing.T, expectedRoot string, info git.RepositoryInfo) { assert.Equal(t, "main", info.CurrentBranch) @@ -35,19 +38,18 @@ func assertSparseGitInfo(t *testing.T, expectedRoot string, info git.RepositoryI assert.Equal(t, expectedRoot, info.WorktreeRoot) } -func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) { +func ensureWorkspacePrefix(root string) string { + // The fixture helper doesn't include /Workspace, so include it here. + if !strings.HasPrefix(root, "/Workspace/") { + return path.Join("/Workspace", root) + } + return root +} + +func TestFetchRepositoryInfoAPI_FromRepo(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - me, err := wt.W.CurrentUser.Me(ctx) - require.NoError(t, err) + targetPath := ensureWorkspacePrefix(acc.TemporaryRepo(wt, examplesRepoUrl)) - targetPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "/testing-clone-bundle-examples-")) - stdout, stderr := RequireSuccessfulRun(t, "repos", "create", examplesRepoUrl, examplesRepoProvider, "--path", targetPath) - t.Cleanup(func() { - RequireSuccessfulRun(t, "repos", "delete", targetPath) - }) - - assert.Empty(t, stderr.String()) - assert.NotEmpty(t, stdout.String()) ctx = dbr.MockRuntime(ctx, true) for _, inputPath := range []string{ @@ -62,18 +64,14 @@ func TestAccFetchRepositoryInfoAPI_FromRepo(t *testing.T) { } } -func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) { +func TestFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) - me, err := wt.W.CurrentUser.Me(ctx) + rootPath := ensureWorkspacePrefix(acc.TemporaryWorkspaceDir(wt, "testing-nonrepo-")) + + // Create directory inside this root path (this is cleaned up as part of the root path). 
+ err := wt.W.Workspace.MkdirsByPath(ctx, path.Join(rootPath, "a/b/c")) require.NoError(t, err) - rootPath := acc.RandomName(path.Join("/Workspace/Users", me.UserName, "testing-nonrepo-")) - _, stderr := RequireSuccessfulRun(t, "workspace", "mkdirs", path.Join(rootPath, "a/b/c")) - t.Cleanup(func() { - RequireSuccessfulRun(t, "workspace", "delete", "--recursive", rootPath) - }) - - assert.Empty(t, stderr.String()) ctx = dbr.MockRuntime(ctx, true) tests := []struct { @@ -101,14 +99,14 @@ func TestAccFetchRepositoryInfoAPI_FromNonRepo(t *testing.T) { assert.NoError(t, err) } else { assert.Error(t, err) - assert.Contains(t, err.Error(), test.msg) + assert.ErrorContains(t, err, test.msg) } assertEmptyGitInfo(t, info) }) } } -func TestAccFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) { +func TestFetchRepositoryInfoDotGit_FromGitRepo(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) repo := cloneRepoLocally(t, examplesRepoUrl) @@ -135,12 +133,12 @@ func cloneRepoLocally(t *testing.T, repoUrl string) string { return localRoot } -func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) { +func TestFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) tempDir := t.TempDir() root := filepath.Join(tempDir, "repo") - require.NoError(t, os.MkdirAll(filepath.Join(root, "a/b/c"), 0700)) + require.NoError(t, os.MkdirAll(filepath.Join(root, "a/b/c"), 0o700)) tests := []string{ filepath.Join(root, "a/b/c"), @@ -151,20 +149,20 @@ func TestAccFetchRepositoryInfoDotGit_FromNonGitRepo(t *testing.T) { for _, input := range tests { t.Run(input, func(t *testing.T) { info, err := git.FetchRepositoryInfo(ctx, input, wt.W) - assert.NoError(t, err) + assert.ErrorIs(t, err, os.ErrNotExist) assertEmptyGitInfo(t, info) }) } } -func TestAccFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) { +func TestFetchRepositoryInfoDotGit_FromBrokenGitRepo(t *testing.T) { ctx, wt := acc.WorkspaceTest(t) tempDir := t.TempDir() root := filepath.Join(tempDir, "repo") path := filepath.Join(root, "a/b/c") - require.NoError(t, os.MkdirAll(path, 0700)) - require.NoError(t, os.WriteFile(filepath.Join(root, ".git"), []byte(""), 0000)) + require.NoError(t, os.MkdirAll(path, 0o700)) + require.NoError(t, os.WriteFile(filepath.Join(root, ".git"), []byte(""), 0o000)) info, err := git.FetchRepositoryInfo(ctx, path, wt.W) assert.NoError(t, err) diff --git a/integration/libs/git/main_test.go b/integration/libs/git/main_test.go new file mode 100644 index 000000000..5d68e0851 --- /dev/null +++ b/integration/libs/git/main_test.go @@ -0,0 +1,13 @@ +package git_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
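The git tests above trade CLI-driven setup and manual t.Cleanup blocks for acc fixtures: acc.TemporaryRepo clones a URL into a fresh workspace path and acc.TemporaryWorkspaceDir creates a scratch directory, each registering its own cleanup. A compact sketch of the pattern, using only fixture and helper names that appear in this diff; the TestMain below then opts the git package into the integration gate:

package git_test

import (
	"testing"

	"github.com/databricks/cli/internal/acc"
	"github.com/databricks/cli/libs/dbr"
	"github.com/databricks/cli/libs/git"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestFixtureBasedRepoSetup(t *testing.T) {
	ctx, wt := acc.WorkspaceTest(t)

	// The cloned repo is removed automatically when the test finishes; no
	// explicit `repos create` / `repos delete` CLI calls are needed.
	repoPath := ensureWorkspacePrefix(acc.TemporaryRepo(wt, examplesRepoUrl))

	// Pretend to be on DBR so the workspace-API code path is exercised.
	ctx = dbr.MockRuntime(ctx, true)

	info, err := git.FetchRepositoryInfo(ctx, repoPath, wt.W)
	require.NoError(t, err)
	assert.Equal(t, "main", info.CurrentBranch)
}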
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/locker_test.go b/integration/libs/locker/locker_test.go similarity index 88% rename from internal/locker_test.go rename to integration/libs/locker/locker_test.go index 3ae783d1b..7624afdee 100644 --- a/internal/locker_test.go +++ b/integration/libs/locker/locker_test.go @@ -1,4 +1,4 @@ -package internal +package locker_test import ( "context" @@ -11,6 +11,8 @@ import ( "testing" "time" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" lockpkg "github.com/databricks/cli/libs/locker" "github.com/databricks/databricks-sdk-go" @@ -28,7 +30,7 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr me, err := wsc.CurrentUser.Me(ctx) assert.NoError(t, err) - remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName(projectNamePrefix)) + remoteProjectRoot := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName(projectNamePrefix)) repoInfo, err := wsc.Repos.Create(ctx, workspace.CreateRepoRequest{ Path: remoteProjectRoot, Url: EmptyRepoUrl, @@ -43,11 +45,9 @@ func createRemoteTestProject(t *testing.T, projectNamePrefix string, wsc *databr return remoteProjectRoot } -func TestAccLock(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - ctx := context.TODO() - wsc, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func TestLock(t *testing.T) { + ctx, wt := acc.WorkspaceTest(t) + wsc := wt.W remoteProjectRoot := createRemoteTestProject(t, "lock-acc-", wsc) // 5 lockers try to acquire a lock at the same time @@ -133,7 +133,8 @@ func TestAccLock(t *testing.T) { // assert on active locker content var res map[string]string - json.Unmarshal(b, &res) + err = json.Unmarshal(b, &res) + require.NoError(t, err) assert.NoError(t, err) assert.Equal(t, "Khan", res["surname"]) assert.Equal(t, "Shah Rukh", res["name"]) @@ -162,14 +163,12 @@ func TestAccLock(t *testing.T) { assert.True(t, lockers[indexOfAnInactiveLocker].Active) } -func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer.Filer) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) +func setupLockerTest(t *testing.T) (context.Context, *lockpkg.Locker, filer.Filer) { + ctx, wt := acc.WorkspaceTest(t) + w := wt.W // create temp wsfs dir - tmpDir := TemporaryWorkspaceDir(t, w) + tmpDir := acc.TemporaryWorkspaceDir(wt, "locker-") f, err := filer.NewWorkspaceFilesClient(w, tmpDir) require.NoError(t, err) @@ -177,12 +176,11 @@ func setupLockerTest(ctx context.Context, t *testing.T) (*lockpkg.Locker, filer. 
locker, err := lockpkg.CreateLocker("redfoo@databricks.com", tmpDir, w) require.NoError(t, err) - return locker, f + return ctx, locker, f } -func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) { - ctx := context.Background() - locker, f := setupLockerTest(ctx, t) +func TestLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) { + ctx, locker, f := setupLockerTest(t) var err error // Acquire lock on tmp directory @@ -202,9 +200,8 @@ func TestAccLockUnlockWithoutAllowsLockFileNotExist(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestAccLockUnlockWithAllowsLockFileNotExist(t *testing.T) { - ctx := context.Background() - locker, f := setupLockerTest(ctx, t) +func TestLockUnlockWithAllowsLockFileNotExist(t *testing.T) { + ctx, locker, f := setupLockerTest(t) var err error // Acquire lock on tmp directory diff --git a/integration/libs/locker/main_test.go b/integration/libs/locker/main_test.go new file mode 100644 index 000000000..33a883768 --- /dev/null +++ b/integration/libs/locker/main_test.go @@ -0,0 +1,13 @@ +package locker_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. +func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/integration/libs/tags/main_test.go b/integration/libs/tags/main_test.go new file mode 100644 index 000000000..4eaf54a20 --- /dev/null +++ b/integration/libs/tags/main_test.go @@ -0,0 +1,13 @@ +package tags_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/tags_test.go b/integration/libs/tags/tags_test.go similarity index 84% rename from internal/tags_test.go rename to integration/libs/tags/tags_test.go index 2dd3759ac..b7c47b5f5 100644 --- a/internal/tags_test.go +++ b/integration/libs/tags/tags_test.go @@ -1,41 +1,27 @@ -package internal +package tags_test import ( - "context" "strings" "testing" + "github.com/databricks/cli/internal/acc" "github.com/databricks/cli/internal/testutil" - "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" ) func testTags(t *testing.T, tags map[string]string) error { - var nodeTypeId string - switch testutil.GetCloud(t) { - case testutil.AWS: - nodeTypeId = "i3.xlarge" - case testutil.Azure: - nodeTypeId = "Standard_DS4_v2" - case testutil.GCP: - nodeTypeId = "n1-standard-4" - } - - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) - - ctx := context.Background() - resp, err := w.Jobs.Create(ctx, jobs.CreateJob{ - Name: RandomName("test-tags-"), + ctx, wt := acc.WorkspaceTest(t) + resp, err := wt.W.Jobs.Create(ctx, jobs.CreateJob{ + Name: testutil.RandomName("test-tags-"), Tasks: []jobs.Task{ { TaskKey: "test", NewCluster: &compute.ClusterSpec{ SparkVersion: "13.3.x-scala2.12", NumWorkers: 1, - NodeTypeId: nodeTypeId, + NodeTypeId: testutil.GetCloud(t).NodeTypeID(), }, SparkPythonTask: &jobs.SparkPythonTask{ PythonFile: "/doesnt_exist.py", @@ -47,7 +33,11 @@ func testTags(t *testing.T, tags map[string]string) error { if resp != nil { t.Cleanup(func() { - w.Jobs.DeleteByJobId(ctx, resp.JobId) + _ = wt.W.Jobs.DeleteByJobId(ctx, resp.JobId) + // Cannot enable errchecking there, tests fail with: + // Error: Received unexpected error: + // Job 0 does not exist. + // require.NoError(t, err) }) } @@ -90,7 +80,7 @@ func runTagTestCases(t *testing.T, cases []tagTestCase) { } } -func TestAccTagKeyAWS(t *testing.T) { +func TestTagKeyAWS(t *testing.T) { testutil.Require(t, testutil.AWS) t.Parallel() @@ -122,7 +112,7 @@ func TestAccTagKeyAWS(t *testing.T) { }) } -func TestAccTagValueAWS(t *testing.T) { +func TestTagValueAWS(t *testing.T) { testutil.Require(t, testutil.AWS) t.Parallel() @@ -148,7 +138,7 @@ func TestAccTagValueAWS(t *testing.T) { }) } -func TestAccTagKeyAzure(t *testing.T) { +func TestTagKeyAzure(t *testing.T) { testutil.Require(t, testutil.Azure) t.Parallel() @@ -180,7 +170,7 @@ func TestAccTagKeyAzure(t *testing.T) { }) } -func TestAccTagValueAzure(t *testing.T) { +func TestTagValueAzure(t *testing.T) { testutil.Require(t, testutil.Azure) t.Parallel() @@ -200,7 +190,7 @@ func TestAccTagValueAzure(t *testing.T) { }) } -func TestAccTagKeyGCP(t *testing.T) { +func TestTagKeyGCP(t *testing.T) { testutil.Require(t, testutil.GCP) t.Parallel() @@ -232,7 +222,7 @@ func TestAccTagKeyGCP(t *testing.T) { }) } -func TestAccTagValueGCP(t *testing.T) { +func TestTagValueGCP(t *testing.T) { testutil.Require(t, testutil.GCP) t.Parallel() diff --git a/integration/python/main_test.go b/integration/python/main_test.go new file mode 100644 index 000000000..b35da21e1 --- /dev/null +++ b/integration/python/main_test.go @@ -0,0 +1,13 @@ +package python_test + +import ( + "testing" + + "github.com/databricks/cli/integration/internal" +) + +// TestMain is the entrypoint executed by the test runner. +// See [internal.Main] for prerequisites for running integration tests. 
+func TestMain(m *testing.M) { + internal.Main(m) +} diff --git a/internal/python/python_tasks_test.go b/integration/python/python_tasks_test.go similarity index 64% rename from internal/python/python_tasks_test.go rename to integration/python/python_tasks_test.go index fde9b37f6..d0a92e084 100644 --- a/internal/python/python_tasks_test.go +++ b/integration/python/python_tasks_test.go @@ -1,4 +1,4 @@ -package python +package python_test import ( "bytes" @@ -14,9 +14,11 @@ import ( "time" "github.com/databricks/cli/bundle/run/output" - "github.com/databricks/cli/internal" + "github.com/databricks/cli/internal/acc" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/filer" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/stretchr/testify/require" @@ -73,10 +75,9 @@ var sparkVersions = []string{ "14.1.x-scala2.12", } -func TestAccRunPythonTaskWorkspace(t *testing.T) { +func TestRunPythonTaskWorkspace(t *testing.T) { // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly - internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") - internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") unsupportedSparkVersionsForWheel := []string{ "11.3.x-scala2.12", @@ -94,10 +95,9 @@ func TestAccRunPythonTaskWorkspace(t *testing.T) { }) } -func TestAccRunPythonTaskDBFS(t *testing.T) { +func TestRunPythonTaskDBFS(t *testing.T) { // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly - internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") - internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") runPythonTasks(t, prepareDBFSFiles(t), testOpts{ name: "Python tasks from DBFS", @@ -107,10 +107,9 @@ func TestAccRunPythonTaskDBFS(t *testing.T) { }) } -func TestAccRunPythonTaskRepo(t *testing.T) { +func TestRunPythonTaskRepo(t *testing.T) { // TODO: remove RUN_PYTHON_TASKS_TEST when ready to be executed as part of nightly - internal.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") - internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + testutil.GetEnvOrSkipTest(t, "RUN_PYTHON_TASKS_TEST") runPythonTasks(t, prepareRepoFiles(t), testOpts{ name: "Python tasks from Repo", @@ -121,19 +120,16 @@ func TestAccRunPythonTaskRepo(t *testing.T) { } func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) { - env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) - w := tw.w - nodeTypeId := internal.GetNodeTypeId(env) + nodeTypeId := testutil.GetCloud(t).NodeTypeID() tasks := make([]jobs.SubmitTask, 0) if opts.includeNotebookTasks { - tasks = append(tasks, internal.GenerateNotebookTasks(tw.pyNotebookPath, sparkVersions, nodeTypeId)...) + tasks = append(tasks, GenerateNotebookTasks(tw.pyNotebookPath, sparkVersions, nodeTypeId)...) } if opts.includeSparkPythonTasks { - tasks = append(tasks, internal.GenerateSparkPythonTasks(tw.sparkPythonPath, sparkVersions, nodeTypeId)...) + tasks = append(tasks, GenerateSparkPythonTasks(tw.sparkPythonPath, sparkVersions, nodeTypeId)...) } if opts.includeWheelTasks { @@ -141,7 +137,7 @@ func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) { if len(opts.wheelSparkVersions) > 0 { versions = opts.wheelSparkVersions } - tasks = append(tasks, internal.GenerateWheelTasks(tw.wheelPath, versions, nodeTypeId)...) 
+ tasks = append(tasks, GenerateWheelTasks(tw.wheelPath, versions, nodeTypeId)...) } ctx := context.Background() @@ -178,13 +174,13 @@ func runPythonTasks(t *testing.T, tw *testFiles, opts testOpts) { } func prepareWorkspaceFiles(t *testing.T) *testFiles { - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) + var err error + ctx, wt := acc.WorkspaceTest(t) + w := wt.W + + baseDir := acc.TemporaryWorkspaceDir(wt, "python-tasks-") - baseDir := internal.TemporaryWorkspaceDir(t, w) pyNotebookPath := path.Join(baseDir, "test.py") - err = w.Workspace.Import(ctx, workspace.Import{ Path: pyNotebookPath, Overwrite: true, @@ -224,11 +220,12 @@ func prepareWorkspaceFiles(t *testing.T) *testFiles { } func prepareDBFSFiles(t *testing.T) *testFiles { - ctx := context.Background() - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) + var err error + ctx, wt := acc.WorkspaceTest(t) + w := wt.W + + baseDir := acc.TemporaryDbfsDir(wt, "python-tasks-") - baseDir := internal.TemporaryDbfsDir(t, w) f, err := filer.NewDbfsClient(w, baseDir) require.NoError(t, err) @@ -253,15 +250,83 @@ func prepareDBFSFiles(t *testing.T) *testFiles { } func prepareRepoFiles(t *testing.T) *testFiles { - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) + _, wt := acc.WorkspaceTest(t) + w := wt.W + + baseDir := acc.TemporaryRepo(wt, "https://github.com/databricks/cli") - repo := internal.TemporaryRepo(t, w) packagePath := "internal/python/testdata" return &testFiles{ w: w, - pyNotebookPath: path.Join(repo, packagePath, "test"), - sparkPythonPath: path.Join(repo, packagePath, "spark.py"), - wheelPath: path.Join(repo, packagePath, "my_test_code-0.0.1-py3-none-any.whl"), + pyNotebookPath: path.Join(baseDir, packagePath, "test"), + sparkPythonPath: path.Join(baseDir, packagePath, "spark.py"), + wheelPath: path.Join(baseDir, packagePath, "my_test_code-0.0.1-py3-none-any.whl"), } } + +func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { + tasks := make([]jobs.SubmitTask, 0) + for i := 0; i < len(versions); i++ { + task := jobs.SubmitTask{ + TaskKey: fmt.Sprintf("notebook_%s", strings.ReplaceAll(versions[i], ".", "_")), + NotebookTask: &jobs.NotebookTask{ + NotebookPath: notebookPath, + }, + NewCluster: &compute.ClusterSpec{ + SparkVersion: versions[i], + NumWorkers: 1, + NodeTypeId: nodeTypeId, + DataSecurityMode: compute.DataSecurityModeUserIsolation, + }, + } + tasks = append(tasks, task) + } + + return tasks +} + +func GenerateSparkPythonTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { + tasks := make([]jobs.SubmitTask, 0) + for i := 0; i < len(versions); i++ { + task := jobs.SubmitTask{ + TaskKey: fmt.Sprintf("spark_%s", strings.ReplaceAll(versions[i], ".", "_")), + SparkPythonTask: &jobs.SparkPythonTask{ + PythonFile: notebookPath, + }, + NewCluster: &compute.ClusterSpec{ + SparkVersion: versions[i], + NumWorkers: 1, + NodeTypeId: nodeTypeId, + DataSecurityMode: compute.DataSecurityModeUserIsolation, + }, + } + tasks = append(tasks, task) + } + + return tasks +} + +func GenerateWheelTasks(wheelPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { + tasks := make([]jobs.SubmitTask, 0) + for i := 0; i < len(versions); i++ { + task := jobs.SubmitTask{ + TaskKey: fmt.Sprintf("whl_%s", strings.ReplaceAll(versions[i], ".", "_")), + PythonWheelTask: &jobs.PythonWheelTask{ + PackageName: "my_test_code", + EntryPoint: "run", + }, + NewCluster: 
&compute.ClusterSpec{ + SparkVersion: versions[i], + NumWorkers: 1, + NodeTypeId: nodeTypeId, + DataSecurityMode: compute.DataSecurityModeUserIsolation, + }, + Libraries: []compute.Library{ + {Whl: wheelPath}, + }, + } + tasks = append(tasks, task) + } + + return tasks +} diff --git a/internal/python/testdata/my_test_code-0.0.1-py3-none-any.whl b/integration/python/testdata/my_test_code-0.0.1-py3-none-any.whl similarity index 100% rename from internal/python/testdata/my_test_code-0.0.1-py3-none-any.whl rename to integration/python/testdata/my_test_code-0.0.1-py3-none-any.whl diff --git a/internal/python/testdata/spark.py b/integration/python/testdata/spark.py similarity index 100% rename from internal/python/testdata/spark.py rename to integration/python/testdata/spark.py diff --git a/internal/python/testdata/test.py b/integration/python/testdata/test.py similarity index 100% rename from internal/python/testdata/test.py rename to integration/python/testdata/test.py diff --git a/internal/acc/debug.go b/internal/acc/debug.go index 116631132..b4939881e 100644 --- a/internal/acc/debug.go +++ b/internal/acc/debug.go @@ -6,7 +6,8 @@ import ( "path" "path/filepath" "strings" - "testing" + + "github.com/databricks/cli/internal/testutil" ) // Detects if test is run from "debug test" feature in VS Code. @@ -16,7 +17,7 @@ func isInDebug() bool { } // Loads debug environment from ~/.databricks/debug-env.json. -func loadDebugEnvIfRunFromIDE(t *testing.T, key string) { +func loadDebugEnvIfRunFromIDE(t testutil.TestingT, key string) { if !isInDebug() { return } diff --git a/internal/acc/fixtures.go b/internal/acc/fixtures.go new file mode 100644 index 000000000..cd867fb3a --- /dev/null +++ b/internal/acc/fixtures.go @@ -0,0 +1,133 @@ +package acc + +import ( + "fmt" + + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/databricks-sdk-go/apierr" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/databricks/databricks-sdk-go/service/files" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/stretchr/testify/require" +) + +func TemporaryWorkspaceDir(t *WorkspaceT, name ...string) string { + ctx := t.ctx + me, err := t.W.CurrentUser.Me(ctx) + require.NoError(t, err) + + // Prefix the name with "integration-test-" to make it easier to identify. + name = append([]string{"integration-test-"}, name...) + basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, testutil.RandomName(name...)) + + t.Logf("Creating workspace directory %s", basePath) + err = t.W.Workspace.MkdirsByPath(ctx, basePath) + require.NoError(t, err) + + // Remove test directory on test completion. + t.Cleanup(func() { + t.Logf("Removing workspace directory %s", basePath) + err := t.W.Workspace.Delete(ctx, workspace.Delete{ + Path: basePath, + Recursive: true, + }) + if err == nil || apierr.IsMissing(err) { + return + } + t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err) + }) + + return basePath +} + +func TemporaryDbfsDir(t *WorkspaceT, name ...string) string { + ctx := t.ctx + + // Prefix the name with "integration-test-" to make it easier to identify. + name = append([]string{"integration-test-"}, name...) 
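+ // Unlike temporary workspace directories, DBFS directories are created
+ // under /tmp rather than under the current user's home directory.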
+ path := fmt.Sprintf("/tmp/%s", testutil.RandomName(name...)) + + t.Logf("Creating DBFS directory %s", path) + err := t.W.Dbfs.MkdirsByPath(ctx, path) + require.NoError(t, err) + + t.Cleanup(func() { + t.Logf("Removing DBFS directory %s", path) + err := t.W.Dbfs.Delete(ctx, files.Delete{ + Path: path, + Recursive: true, + }) + if err == nil || apierr.IsMissing(err) { + return + } + t.Logf("Unable to remove temporary DBFS directory %s: %#v", path, err) + }) + + return path +} + +func TemporaryRepo(t *WorkspaceT, url string) string { + ctx := t.ctx + me, err := t.W.CurrentUser.Me(ctx) + require.NoError(t, err) + + // Prefix the path with "integration-test-" to make it easier to identify. + path := fmt.Sprintf("/Repos/%s/%s", me.UserName, testutil.RandomName("integration-test-")) + + t.Logf("Creating repo: %s", path) + resp, err := t.W.Repos.Create(ctx, workspace.CreateRepoRequest{ + Url: url, + Path: path, + Provider: "gitHub", + }) + require.NoError(t, err) + + t.Cleanup(func() { + t.Logf("Removing repo: %s", path) + err := t.W.Repos.Delete(ctx, workspace.DeleteRepoRequest{ + RepoId: resp.Id, + }) + if err == nil || apierr.IsMissing(err) { + return + } + t.Logf("Unable to remove repo %s: %#v", path, err) + }) + + return path +} + +// Create a new Unity Catalog volume in a catalog called "main" in the workspace. +func TemporaryVolume(t *WorkspaceT) string { + ctx := t.ctx + w := t.W + + // Create a schema + schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{ + CatalogName: "main", + Name: testutil.RandomName("test-schema-"), + }) + require.NoError(t, err) + t.Cleanup(func() { + err := w.Schemas.Delete(ctx, catalog.DeleteSchemaRequest{ + FullName: schema.FullName, + }) + require.NoError(t, err) + }) + + // Create a volume + volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{ + CatalogName: "main", + SchemaName: schema.Name, + Name: "my-volume", + VolumeType: catalog.VolumeTypeManaged, + }) + require.NoError(t, err) + t.Cleanup(func() { + err := w.Volumes.Delete(ctx, catalog.DeleteVolumeRequest{ + Name: volume.FullName, + }) + require.NoError(t, err) + }) + + return fmt.Sprintf("/Volumes/%s/%s/%s", "main", schema.Name, volume.Name) +} diff --git a/internal/acc/workspace.go b/internal/acc/workspace.go index 69ab0e715..2f8a5b8e7 100644 --- a/internal/acc/workspace.go +++ b/internal/acc/workspace.go @@ -2,19 +2,16 @@ package acc import ( "context" - "fmt" "os" - "testing" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/databricks-sdk-go" - "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/compute" - "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/stretchr/testify/require" ) type WorkspaceT struct { - *testing.T + testutil.TestingT W *databricks.WorkspaceClient @@ -23,16 +20,16 @@ type WorkspaceT struct { exec *compute.CommandExecutorV2 } -func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { +func WorkspaceTest(t testutil.TestingT) (context.Context, *WorkspaceT) { loadDebugEnvIfRunFromIDE(t, "workspace") - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) w, err := databricks.NewWorkspaceClient() require.NoError(t, err) wt := &WorkspaceT{ - T: t, + TestingT: t, W: w, @@ -43,10 +40,10 @@ func WorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { } // Run the workspace test only on UC workspaces. 
-func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { +func UcWorkspaceTest(t testutil.TestingT) (context.Context, *WorkspaceT) { loadDebugEnvIfRunFromIDE(t, "workspace") - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + t.Log(testutil.GetEnvOrSkipTest(t, "CLOUD_ENV")) if os.Getenv("TEST_METASTORE_ID") == "" { t.Skipf("Skipping on non-UC workspaces") @@ -59,7 +56,7 @@ func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { require.NoError(t, err) wt := &WorkspaceT{ - T: t, + TestingT: t, W: w, @@ -70,7 +67,7 @@ func UcWorkspaceTest(t *testing.T) (context.Context, *WorkspaceT) { } func (t *WorkspaceT) TestClusterID() string { - clusterID := GetEnvOrSkipTest(t.T, "TEST_BRICKS_CLUSTER_ID") + clusterID := testutil.GetEnvOrSkipTest(t, "TEST_BRICKS_CLUSTER_ID") err := t.W.Clusters.EnsureClusterIsRunning(t.ctx, clusterID) require.NoError(t, err) return clusterID @@ -97,30 +94,3 @@ func (t *WorkspaceT) RunPython(code string) (string, error) { require.True(t, ok, "unexpected type %T", results.Data) return output, nil } - -func (t *WorkspaceT) TemporaryWorkspaceDir(name ...string) string { - ctx := context.Background() - me, err := t.W.CurrentUser.Me(ctx) - require.NoError(t, err) - - basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName(name...)) - - t.Logf("Creating %s", basePath) - err = t.W.Workspace.MkdirsByPath(ctx, basePath) - require.NoError(t, err) - - // Remove test directory on test completion. - t.Cleanup(func() { - t.Logf("Removing %s", basePath) - err := t.W.Workspace.Delete(ctx, workspace.Delete{ - Path: basePath, - Recursive: true, - }) - if err == nil || apierr.IsMissing(err) { - return - } - t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err) - }) - - return basePath -} diff --git a/internal/alerts_test.go b/internal/alerts_test.go deleted file mode 100644 index 6d7544074..000000000 --- a/internal/alerts_test.go +++ /dev/null @@ -1,14 +0,0 @@ -package internal - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestAccAlertsCreateErrWhenNoArguments(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - _, _, err := RequireErrorRun(t, "alerts-legacy", "create") - assert.Equal(t, "please provide command input in JSON format by specifying the --json flag", err.Error()) -} diff --git a/internal/api_test.go b/internal/api_test.go deleted file mode 100644 index f3e8b7171..000000000 --- a/internal/api_test.go +++ /dev/null @@ -1,51 +0,0 @@ -package internal - -import ( - "encoding/json" - "fmt" - "path" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - _ "github.com/databricks/cli/cmd/api" -) - -func TestAccApiGet(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - stdout, _ := RequireSuccessfulRun(t, "api", "get", "/api/2.0/preview/scim/v2/Me") - - // Deserialize SCIM API response. - var out map[string]any - err := json.Unmarshal(stdout.Bytes(), &out) - require.NoError(t, err) - - // Assert that the output somewhat makes sense for the SCIM API. 
- assert.Equal(t, true, out["active"]) - assert.NotNil(t, out["id"]) -} - -func TestAccApiPost(t *testing.T) { - env := GetEnvOrSkipTest(t, "CLOUD_ENV") - t.Log(env) - if env == "gcp" { - t.Skip("DBFS REST API is disabled on gcp") - } - - dbfsPath := path.Join("/tmp/databricks/integration", RandomName("api-post")) - requestPath := writeFile(t, "body.json", fmt.Sprintf(`{ - "path": "%s" - }`, dbfsPath)) - - // Post to mkdir - { - RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/mkdirs") - } - - // Post to delete - { - RequireSuccessfulRun(t, "api", "post", "--json=@"+requestPath, "/api/2.0/dbfs/delete") - } -} diff --git a/internal/build/variables.go b/internal/build/variables.go index 197dee9c3..80c4683ab 100644 --- a/internal/build/variables.go +++ b/internal/build/variables.go @@ -1,21 +1,27 @@ package build -var buildProjectName string = "cli" -var buildVersion string = "" +var ( + buildProjectName string = "cli" + buildVersion string = "" +) -var buildBranch string = "undefined" -var buildTag string = "undefined" -var buildShortCommit string = "00000000" -var buildFullCommit string = "0000000000000000000000000000000000000000" -var buildCommitTimestamp string = "0" -var buildSummary string = "v0.0.0" +var ( + buildBranch string = "undefined" + buildTag string = "undefined" + buildShortCommit string = "00000000" + buildFullCommit string = "0000000000000000000000000000000000000000" + buildCommitTimestamp string = "0" + buildSummary string = "v0.0.0" +) -var buildMajor string = "0" -var buildMinor string = "0" -var buildPatch string = "0" -var buildPrerelease string = "" -var buildIsSnapshot string = "false" -var buildTimestamp string = "0" +var ( + buildMajor string = "0" + buildMinor string = "0" + buildPatch string = "0" + buildPrerelease string = "" + buildIsSnapshot string = "false" + buildTimestamp string = "0" +) // This function is used to set the build version for testing purposes. 
func SetBuildVersion(version string) { diff --git a/internal/bundle/basic_test.go b/internal/bundle/basic_test.go deleted file mode 100644 index c24ef0c05..000000000 --- a/internal/bundle/basic_test.go +++ /dev/null @@ -1,43 +0,0 @@ -package bundle - -import ( - "os" - "path/filepath" - "testing" - - "github.com/databricks/cli/internal" - "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/env" - "github.com/google/uuid" - "github.com/stretchr/testify/require" -) - -func TestAccBasicBundleDeployWithFailOnActiveRuns(t *testing.T) { - ctx, _ := acc.WorkspaceTest(t) - - nodeTypeId := internal.GetNodeTypeId(env.Get(ctx, "CLOUD_ENV")) - uniqueId := uuid.New().String() - root, err := initTestTemplate(t, ctx, "basic", map[string]any{ - "unique_id": uniqueId, - "node_type_id": nodeTypeId, - "spark_version": defaultSparkVersion, - }) - require.NoError(t, err) - - t.Cleanup(func() { - err = destroyBundle(t, ctx, root) - require.NoError(t, err) - }) - - // deploy empty bundle - err = deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"}) - require.NoError(t, err) - - // Remove .databricks directory to simulate a fresh deployment - err = os.RemoveAll(filepath.Join(root, ".databricks")) - require.NoError(t, err) - - // deploy empty bundle again - err = deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"}) - require.NoError(t, err) -} diff --git a/internal/clusters_test.go b/internal/clusters_test.go deleted file mode 100644 index 6daddcce3..000000000 --- a/internal/clusters_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package internal - -import ( - "fmt" - "regexp" - "testing" - - "github.com/stretchr/testify/assert" -) - -var clusterId string - -func TestAccClustersList(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - stdout, stderr := RequireSuccessfulRun(t, "clusters", "list") - outStr := stdout.String() - assert.Contains(t, outStr, "ID") - assert.Contains(t, outStr, "Name") - assert.Contains(t, outStr, "State") - assert.Equal(t, "", stderr.String()) - - idRegExp := regexp.MustCompile(`[0-9]{4}\-[0-9]{6}-[a-z0-9]{8}`) - clusterId = idRegExp.FindString(outStr) - assert.NotEmpty(t, clusterId) -} - -func TestAccClustersGet(t *testing.T) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - stdout, stderr := RequireSuccessfulRun(t, "clusters", "get", clusterId) - outStr := stdout.String() - assert.Contains(t, outStr, fmt.Sprintf(`"cluster_id":"%s"`, clusterId)) - assert.Equal(t, "", stderr.String()) -} - -func TestClusterCreateErrorWhenNoArguments(t *testing.T) { - _, _, err := RequireErrorRun(t, "clusters", "create") - assert.Contains(t, err.Error(), "accepts 1 arg(s), received 0") -} diff --git a/internal/helpers.go b/internal/helpers.go deleted file mode 100644 index 596f45537..000000000 --- a/internal/helpers.go +++ /dev/null @@ -1,623 +0,0 @@ -package internal - -import ( - "bufio" - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "math/rand" - "net/http" - "os" - "path" - "path/filepath" - "reflect" - "strings" - "sync" - "testing" - "time" - - "github.com/databricks/cli/cmd/root" - "github.com/databricks/cli/internal/acc" - "github.com/databricks/cli/libs/flags" - - "github.com/databricks/cli/cmd" - _ "github.com/databricks/cli/cmd/version" - "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/filer" - "github.com/databricks/databricks-sdk-go" - "github.com/databricks/databricks-sdk-go/apierr" - "github.com/databricks/databricks-sdk-go/service/catalog" - 
"github.com/databricks/databricks-sdk-go/service/compute" - "github.com/databricks/databricks-sdk-go/service/files" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/workspace" - "github.com/spf13/cobra" - "github.com/spf13/pflag" - "github.com/stretchr/testify/require" - - _ "github.com/databricks/cli/cmd/workspace" -) - -const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" - -// GetEnvOrSkipTest proceeds with test only with that env variable -func GetEnvOrSkipTest(t *testing.T, name string) string { - value := os.Getenv(name) - if value == "" { - t.Skipf("Environment variable %s is missing", name) - } - return value -} - -// RandomName gives random name with optional prefix. e.g. qa.RandomName("tf-") -func RandomName(prefix ...string) string { - randLen := 12 - b := make([]byte, randLen) - for i := range b { - b[i] = charset[rand.Intn(randLen)] - } - if len(prefix) > 0 { - return fmt.Sprintf("%s%s", strings.Join(prefix, ""), b) - } - return string(b) -} - -// Helper for running the root command in the background. -// It ensures that the background goroutine terminates upon -// test completion through cancelling the command context. -type cobraTestRunner struct { - *testing.T - - args []string - stdout bytes.Buffer - stderr bytes.Buffer - stdinR *io.PipeReader - stdinW *io.PipeWriter - - ctx context.Context - - // Line-by-line output. - // Background goroutines populate these channels by reading from stdout/stderr pipes. - stdoutLines <-chan string - stderrLines <-chan string - - errch <-chan error -} - -func consumeLines(ctx context.Context, wg *sync.WaitGroup, r io.Reader) <-chan string { - ch := make(chan string, 30000) - wg.Add(1) - go func() { - defer close(ch) - defer wg.Done() - scanner := bufio.NewScanner(r) - for scanner.Scan() { - // We expect to be able to always send these lines into the channel. - // If we can't, it means the channel is full and likely there is a problem - // in either the test or the code under test. - select { - case <-ctx.Done(): - return - case ch <- scanner.Text(): - continue - default: - panic("line buffer is full") - } - } - }() - return ch -} - -func (t *cobraTestRunner) registerFlagCleanup(c *cobra.Command) { - // Find target command that will be run. Example: if the command run is `databricks fs cp`, - // target command corresponds to `cp` - targetCmd, _, err := c.Find(t.args) - if err != nil && strings.HasPrefix(err.Error(), "unknown command") { - // even if command is unknown, we can proceed - require.NotNil(t, targetCmd) - } else { - require.NoError(t, err) - } - - // Force initialization of default flags. - // These are initialized by cobra at execution time and would otherwise - // not be cleaned up by the cleanup function below. - targetCmd.InitDefaultHelpFlag() - targetCmd.InitDefaultVersionFlag() - - // Restore flag values to their original value on test completion. - targetCmd.Flags().VisitAll(func(f *pflag.Flag) { - v := reflect.ValueOf(f.Value) - if v.Kind() == reflect.Ptr { - v = v.Elem() - } - // Store copy of the current flag value. 
- reset := reflect.New(v.Type()).Elem() - reset.Set(v) - t.Cleanup(func() { - v.Set(reset) - }) - }) -} - -// Like [cobraTestRunner.Eventually], but more specific -func (t *cobraTestRunner) WaitForTextPrinted(text string, timeout time.Duration) { - t.Eventually(func() bool { - currentStdout := t.stdout.String() - return strings.Contains(currentStdout, text) - }, timeout, 50*time.Millisecond) -} - -func (t *cobraTestRunner) WaitForOutput(text string, timeout time.Duration) { - require.Eventually(t.T, func() bool { - currentStdout := t.stdout.String() - currentErrout := t.stderr.String() - return strings.Contains(currentStdout, text) || strings.Contains(currentErrout, text) - }, timeout, 50*time.Millisecond) -} - -func (t *cobraTestRunner) WithStdin() { - reader, writer := io.Pipe() - t.stdinR = reader - t.stdinW = writer -} - -func (t *cobraTestRunner) CloseStdin() { - if t.stdinW == nil { - panic("no standard input configured") - } - t.stdinW.Close() -} - -func (t *cobraTestRunner) SendText(text string) { - if t.stdinW == nil { - panic("no standard input configured") - } - t.stdinW.Write([]byte(text + "\n")) -} - -func (t *cobraTestRunner) RunBackground() { - var stdoutR, stderrR io.Reader - var stdoutW, stderrW io.WriteCloser - stdoutR, stdoutW = io.Pipe() - stderrR, stderrW = io.Pipe() - ctx := cmdio.NewContext(t.ctx, &cmdio.Logger{ - Mode: flags.ModeAppend, - Reader: bufio.Reader{}, - Writer: stderrW, - }) - - cli := cmd.New(ctx) - cli.SetOut(stdoutW) - cli.SetErr(stderrW) - cli.SetArgs(t.args) - if t.stdinW != nil { - cli.SetIn(t.stdinR) - } - - // Register cleanup function to restore flags to their original values - // once test has been executed. This is needed because flag values reside - // in a global singleton data-structure, and thus subsequent tests might - // otherwise interfere with each other - t.registerFlagCleanup(cli) - - errch := make(chan error) - ctx, cancel := context.WithCancel(ctx) - - // Tee stdout/stderr to buffers. - stdoutR = io.TeeReader(stdoutR, &t.stdout) - stderrR = io.TeeReader(stderrR, &t.stderr) - - // Consume stdout/stderr line-by-line. - var wg sync.WaitGroup - t.stdoutLines = consumeLines(ctx, &wg, stdoutR) - t.stderrLines = consumeLines(ctx, &wg, stderrR) - - // Run command in background. - go func() { - err := root.Execute(ctx, cli) - if err != nil { - t.Logf("Error running command: %s", err) - } - - // Close pipes to signal EOF. - stdoutW.Close() - stderrW.Close() - - // Wait for the [consumeLines] routines to finish now that - // the pipes they're reading from have closed. - wg.Wait() - - if t.stdout.Len() > 0 { - // Make a copy of the buffer such that it remains "unread". - scanner := bufio.NewScanner(bytes.NewBuffer(t.stdout.Bytes())) - for scanner.Scan() { - t.Logf("[databricks stdout]: %s", scanner.Text()) - } - } - - if t.stderr.Len() > 0 { - // Make a copy of the buffer such that it remains "unread". - scanner := bufio.NewScanner(bytes.NewBuffer(t.stderr.Bytes())) - for scanner.Scan() { - t.Logf("[databricks stderr]: %s", scanner.Text()) - } - } - - // Reset context on command for the next test. - // These commands are globals so we have to clean up to the best of our ability after each run. - // See https://github.com/spf13/cobra/blob/a6f198b635c4b18fff81930c40d464904e55b161/command.go#L1062-L1066 - //nolint:staticcheck // cobra sets the context and doesn't clear it - cli.SetContext(nil) - - // Make caller aware of error. - errch <- err - close(errch) - }() - - // Ensure command terminates upon test completion (success or failure). 
- t.Cleanup(func() { - // Signal termination of command. - cancel() - // Wait for goroutine to finish. - <-errch - }) - - t.errch = errch -} - -func (t *cobraTestRunner) Run() (bytes.Buffer, bytes.Buffer, error) { - t.RunBackground() - err := <-t.errch - return t.stdout, t.stderr, err -} - -// Like [require.Eventually] but errors if the underlying command has failed. -func (c *cobraTestRunner) Eventually(condition func() bool, waitFor time.Duration, tick time.Duration, msgAndArgs ...any) { - ch := make(chan bool, 1) - - timer := time.NewTimer(waitFor) - defer timer.Stop() - - ticker := time.NewTicker(tick) - defer ticker.Stop() - - // Kick off condition check immediately. - go func() { ch <- condition() }() - - for tick := ticker.C; ; { - select { - case err := <-c.errch: - require.Fail(c, "Command failed", err) - return - case <-timer.C: - require.Fail(c, "Condition never satisfied", msgAndArgs...) - return - case <-tick: - tick = nil - go func() { ch <- condition() }() - case v := <-ch: - if v { - return - } - tick = ticker.C - } - } -} - -func (t *cobraTestRunner) RunAndExpectOutput(heredoc string) { - stdout, _, err := t.Run() - require.NoError(t, err) - require.Equal(t, cmdio.Heredoc(heredoc), strings.TrimSpace(stdout.String())) -} - -func (t *cobraTestRunner) RunAndParseJSON(v any) { - stdout, _, err := t.Run() - require.NoError(t, err) - err = json.Unmarshal(stdout.Bytes(), &v) - require.NoError(t, err) -} - -func NewCobraTestRunner(t *testing.T, args ...string) *cobraTestRunner { - return &cobraTestRunner{ - T: t, - ctx: context.Background(), - args: args, - } -} - -func NewCobraTestRunnerWithContext(t *testing.T, ctx context.Context, args ...string) *cobraTestRunner { - return &cobraTestRunner{ - T: t, - ctx: ctx, - args: args, - } -} - -func RequireSuccessfulRun(t *testing.T, args ...string) (bytes.Buffer, bytes.Buffer) { - t.Logf("run args: [%s]", strings.Join(args, ", ")) - c := NewCobraTestRunner(t, args...) - stdout, stderr, err := c.Run() - require.NoError(t, err) - return stdout, stderr -} - -func RequireErrorRun(t *testing.T, args ...string) (bytes.Buffer, bytes.Buffer, error) { - c := NewCobraTestRunner(t, args...) 
- stdout, stderr, err := c.Run() - require.Error(t, err) - return stdout, stderr, err -} - -func readFile(t *testing.T, name string) string { - b, err := os.ReadFile(name) - require.NoError(t, err) - - return string(b) -} - -func writeFile(t *testing.T, name string, body string) string { - f, err := os.Create(filepath.Join(t.TempDir(), name)) - require.NoError(t, err) - _, err = f.WriteString(body) - require.NoError(t, err) - f.Close() - return f.Name() -} - -func GenerateNotebookTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { - tasks := make([]jobs.SubmitTask, 0) - for i := 0; i < len(versions); i++ { - task := jobs.SubmitTask{ - TaskKey: fmt.Sprintf("notebook_%s", strings.ReplaceAll(versions[i], ".", "_")), - NotebookTask: &jobs.NotebookTask{ - NotebookPath: notebookPath, - }, - NewCluster: &compute.ClusterSpec{ - SparkVersion: versions[i], - NumWorkers: 1, - NodeTypeId: nodeTypeId, - DataSecurityMode: compute.DataSecurityModeUserIsolation, - }, - } - tasks = append(tasks, task) - } - - return tasks -} - -func GenerateSparkPythonTasks(notebookPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { - tasks := make([]jobs.SubmitTask, 0) - for i := 0; i < len(versions); i++ { - task := jobs.SubmitTask{ - TaskKey: fmt.Sprintf("spark_%s", strings.ReplaceAll(versions[i], ".", "_")), - SparkPythonTask: &jobs.SparkPythonTask{ - PythonFile: notebookPath, - }, - NewCluster: &compute.ClusterSpec{ - SparkVersion: versions[i], - NumWorkers: 1, - NodeTypeId: nodeTypeId, - DataSecurityMode: compute.DataSecurityModeUserIsolation, - }, - } - tasks = append(tasks, task) - } - - return tasks -} - -func GenerateWheelTasks(wheelPath string, versions []string, nodeTypeId string) []jobs.SubmitTask { - tasks := make([]jobs.SubmitTask, 0) - for i := 0; i < len(versions); i++ { - task := jobs.SubmitTask{ - TaskKey: fmt.Sprintf("whl_%s", strings.ReplaceAll(versions[i], ".", "_")), - PythonWheelTask: &jobs.PythonWheelTask{ - PackageName: "my_test_code", - EntryPoint: "run", - }, - NewCluster: &compute.ClusterSpec{ - SparkVersion: versions[i], - NumWorkers: 1, - NodeTypeId: nodeTypeId, - DataSecurityMode: compute.DataSecurityModeUserIsolation, - }, - Libraries: []compute.Library{ - {Whl: wheelPath}, - }, - } - tasks = append(tasks, task) - } - - return tasks -} - -func TemporaryWorkspaceDir(t *testing.T, w *databricks.WorkspaceClient) string { - ctx := context.Background() - me, err := w.CurrentUser.Me(ctx) - require.NoError(t, err) - - basePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("integration-test-wsfs-")) - - t.Logf("Creating %s", basePath) - err = w.Workspace.MkdirsByPath(ctx, basePath) - require.NoError(t, err) - - // Remove test directory on test completion. 
- t.Cleanup(func() { - t.Logf("Removing %s", basePath) - err := w.Workspace.Delete(ctx, workspace.Delete{ - Path: basePath, - Recursive: true, - }) - if err == nil || apierr.IsMissing(err) { - return - } - t.Logf("Unable to remove temporary workspace directory %s: %#v", basePath, err) - }) - - return basePath -} - -func TemporaryDbfsDir(t *testing.T, w *databricks.WorkspaceClient) string { - ctx := context.Background() - path := fmt.Sprintf("/tmp/%s", RandomName("integration-test-dbfs-")) - - t.Logf("Creating DBFS folder:%s", path) - err := w.Dbfs.MkdirsByPath(ctx, path) - require.NoError(t, err) - - t.Cleanup(func() { - t.Logf("Removing DBFS folder:%s", path) - err := w.Dbfs.Delete(ctx, files.Delete{ - Path: path, - Recursive: true, - }) - if err == nil || apierr.IsMissing(err) { - return - } - t.Logf("unable to remove temporary dbfs directory %s: %#v", path, err) - }) - - return path -} - -// Create a new UC volume in a catalog called "main" in the workspace. -func TemporaryUcVolume(t *testing.T, w *databricks.WorkspaceClient) string { - ctx := context.Background() - - // Create a schema - schema, err := w.Schemas.Create(ctx, catalog.CreateSchema{ - CatalogName: "main", - Name: RandomName("test-schema-"), - }) - require.NoError(t, err) - t.Cleanup(func() { - w.Schemas.Delete(ctx, catalog.DeleteSchemaRequest{ - FullName: schema.FullName, - }) - }) - - // Create a volume - volume, err := w.Volumes.Create(ctx, catalog.CreateVolumeRequestContent{ - CatalogName: "main", - SchemaName: schema.Name, - Name: "my-volume", - VolumeType: catalog.VolumeTypeManaged, - }) - require.NoError(t, err) - t.Cleanup(func() { - w.Volumes.Delete(ctx, catalog.DeleteVolumeRequest{ - Name: volume.FullName, - }) - }) - - return path.Join("/Volumes", "main", schema.Name, volume.Name) - -} - -func TemporaryRepo(t *testing.T, w *databricks.WorkspaceClient) string { - ctx := context.Background() - me, err := w.CurrentUser.Me(ctx) - require.NoError(t, err) - - repoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("integration-test-repo-")) - - t.Logf("Creating repo:%s", repoPath) - repoInfo, err := w.Repos.Create(ctx, workspace.CreateRepoRequest{ - Url: "https://github.com/databricks/cli", - Provider: "github", - Path: repoPath, - }) - require.NoError(t, err) - - t.Cleanup(func() { - t.Logf("Removing repo: %s", repoPath) - err := w.Repos.Delete(ctx, workspace.DeleteRepoRequest{ - RepoId: repoInfo.Id, - }) - if err == nil || apierr.IsMissing(err) { - return - } - t.Logf("unable to remove repo %s: %#v", repoPath, err) - }) - - return repoPath -} - -func GetNodeTypeId(env string) string { - if env == "gcp" { - return "n1-standard-4" - } else if env == "aws" || env == "ucws" { - // aws-prod-ucws has CLOUD_ENV set to "ucws" - return "i3.xlarge" - } - return "Standard_DS4_v2" -} - -func setupLocalFiler(t *testing.T) (filer.Filer, string) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - tmp := t.TempDir() - f, err := filer.NewLocalClient(tmp) - require.NoError(t, err) - - return f, path.Join(filepath.ToSlash(tmp)) -} - -func setupWsfsFiler(t *testing.T) (filer.Filer, string) { - ctx, wt := acc.WorkspaceTest(t) - - tmpdir := TemporaryWorkspaceDir(t, wt.W) - f, err := filer.NewWorkspaceFilesClient(wt.W, tmpdir) - require.NoError(t, err) - - // Check if we can use this API here, skip test if we cannot. 
- _, err = f.Read(ctx, "we_use_this_call_to_test_if_this_api_is_enabled") - var aerr *apierr.APIError - if errors.As(err, &aerr) && aerr.StatusCode == http.StatusBadRequest { - t.Skip(aerr.Message) - } - - return f, tmpdir -} - -func setupWsfsExtensionsFiler(t *testing.T) (filer.Filer, string) { - _, wt := acc.WorkspaceTest(t) - - tmpdir := TemporaryWorkspaceDir(t, wt.W) - f, err := filer.NewWorkspaceFilesExtensionsClient(wt.W, tmpdir) - require.NoError(t, err) - - return f, tmpdir -} - -func setupDbfsFiler(t *testing.T) (filer.Filer, string) { - _, wt := acc.WorkspaceTest(t) - - tmpDir := TemporaryDbfsDir(t, wt.W) - f, err := filer.NewDbfsClient(wt.W, tmpDir) - require.NoError(t, err) - - return f, path.Join("dbfs:/", tmpDir) -} - -func setupUcVolumesFiler(t *testing.T) (filer.Filer, string) { - t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - - if os.Getenv("TEST_METASTORE_ID") == "" { - t.Skip("Skipping tests that require a UC Volume when metastore id is not set.") - } - - w, err := databricks.NewWorkspaceClient() - require.NoError(t, err) - - tmpDir := TemporaryUcVolume(t, w) - f, err := filer.NewFilesClient(w, tmpDir) - require.NoError(t, err) - - return f, path.Join("dbfs:/", tmpDir) -} diff --git a/internal/jobs_test.go b/internal/jobs_test.go deleted file mode 100644 index 8513168c8..000000000 --- a/internal/jobs_test.go +++ /dev/null @@ -1,25 +0,0 @@ -package internal - -import ( - "encoding/json" - "fmt" - "testing" - - "github.com/databricks/cli/internal/acc" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestAccCreateJob(t *testing.T) { - acc.WorkspaceTest(t) - env := GetEnvOrSkipTest(t, "CLOUD_ENV") - if env != "azure" { - t.Skipf("Not running test on cloud %s", env) - } - stdout, stderr := RequireSuccessfulRun(t, "jobs", "create", "--json", "@testjsons/create_job_without_workers.json", "--log-level=debug") - assert.Empty(t, stderr.String()) - var output map[string]int - err := json.Unmarshal(stdout.Bytes(), &output) - require.NoError(t, err) - RequireSuccessfulRun(t, "jobs", "delete", fmt.Sprint(output["job_id"]), "--log-level=debug") -} diff --git a/internal/storage_credentials_test.go b/internal/storage_credentials_test.go deleted file mode 100644 index 07c21861f..000000000 --- a/internal/storage_credentials_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package internal - -import ( - "testing" - - "github.com/databricks/cli/internal/acc" - "github.com/stretchr/testify/assert" -) - -func TestAccStorageCredentialsListRendersResponse(t *testing.T) { - _, _ = acc.WorkspaceTest(t) - - // Check if metastore is assigned for the workspace, otherwise test will fail - t.Log(GetEnvOrSkipTest(t, "TEST_METASTORE_ID")) - - stdout, stderr := RequireSuccessfulRun(t, "storage-credentials", "list") - assert.NotEmpty(t, stdout) - assert.Empty(t, stderr) -} diff --git a/internal/testcli/README.md b/internal/testcli/README.md new file mode 100644 index 000000000..b37ae3bc9 --- /dev/null +++ b/internal/testcli/README.md @@ -0,0 +1,7 @@ +# testcli + +This package provides a way to run the CLI from tests as if it were a separate process. +By running the CLI inline we can still set breakpoints and step through execution. + +It transitively imports pretty much this entire repository, which is why we +intentionally keep this package _separate_ from `testutil`. 
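+
+A minimal usage sketch (a hypothetical test; a `*testing.T` satisfies the
+`testutil.TestingT` interface expected by this package):
+
+```go
+package main_test
+
+import (
+	"context"
+	"testing"
+
+	"github.com/databricks/cli/internal/testcli"
+	"github.com/stretchr/testify/require"
+)
+
+func TestVersionCommand(t *testing.T) {
+	ctx := context.Background()
+
+	// Runs `databricks version` inline and fails the test on a non-nil error.
+	stdout, _ := testcli.RequireSuccessfulRun(t, ctx, "version")
+	require.NotEmpty(t, stdout.String())
+}
+```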
diff --git a/internal/testcli/runner.go b/internal/testcli/runner.go new file mode 100644 index 000000000..95073b57c --- /dev/null +++ b/internal/testcli/runner.go @@ -0,0 +1,306 @@ +package testcli + +import ( + "bufio" + "bytes" + "context" + "encoding/json" + "io" + "reflect" + "strings" + "sync" + "time" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "github.com/stretchr/testify/require" + + "github.com/databricks/cli/cmd" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/internal/testutil" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" +) + +// Helper for running the root command in the background. +// It ensures that the background goroutine terminates upon +// test completion through cancelling the command context. +type Runner struct { + testutil.TestingT + + args []string + stdout bytes.Buffer + stderr bytes.Buffer + stdinR *io.PipeReader + stdinW *io.PipeWriter + + ctx context.Context + + // Line-by-line output. + // Background goroutines populate these channels by reading from stdout/stderr pipes. + StdoutLines <-chan string + StderrLines <-chan string + + errch <-chan error +} + +func consumeLines(ctx context.Context, wg *sync.WaitGroup, r io.Reader) <-chan string { + ch := make(chan string, 30000) + wg.Add(1) + go func() { + defer close(ch) + defer wg.Done() + scanner := bufio.NewScanner(r) + for scanner.Scan() { + // We expect to be able to always send these lines into the channel. + // If we can't, it means the channel is full and likely there is a problem + // in either the test or the code under test. + select { + case <-ctx.Done(): + return + case ch <- scanner.Text(): + continue + default: + panic("line buffer is full") + } + } + }() + return ch +} + +func (r *Runner) registerFlagCleanup(c *cobra.Command) { + // Find target command that will be run. Example: if the command run is `databricks fs cp`, + // target command corresponds to `cp` + targetCmd, _, err := c.Find(r.args) + if err != nil && strings.HasPrefix(err.Error(), "unknown command") { + // even if command is unknown, we can proceed + require.NotNil(r, targetCmd) + } else { + require.NoError(r, err) + } + + // Force initialization of default flags. + // These are initialized by cobra at execution time and would otherwise + // not be cleaned up by the cleanup function below. + targetCmd.InitDefaultHelpFlag() + targetCmd.InitDefaultVersionFlag() + + // Restore flag values to their original value on test completion. + targetCmd.Flags().VisitAll(func(f *pflag.Flag) { + v := reflect.ValueOf(f.Value) + if v.Kind() == reflect.Ptr { + v = v.Elem() + } + // Store copy of the current flag value. 
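+ // pflag.Value is an interface, so take a snapshot of the concrete value
+ // it points to; the Cleanup below restores that snapshot verbatim.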
+ reset := reflect.New(v.Type()).Elem()
+ reset.Set(v)
+ r.Cleanup(func() {
+ v.Set(reset)
+ })
+ })
+}
+
+// Like [Runner.Eventually], but waits specifically for text to appear on stdout.
+func (r *Runner) WaitForTextPrinted(text string, timeout time.Duration) {
+ r.Eventually(func() bool {
+ currentStdout := r.stdout.String()
+ return strings.Contains(currentStdout, text)
+ }, timeout, 50*time.Millisecond)
+}
+
+func (r *Runner) WaitForOutput(text string, timeout time.Duration) {
+ require.Eventually(r, func() bool {
+ currentStdout := r.stdout.String()
+ currentErrout := r.stderr.String()
+ return strings.Contains(currentStdout, text) || strings.Contains(currentErrout, text)
+ }, timeout, 50*time.Millisecond)
+}
+
+func (r *Runner) WithStdin() {
+ reader, writer := io.Pipe()
+ r.stdinR = reader
+ r.stdinW = writer
+}
+
+func (r *Runner) CloseStdin() {
+ if r.stdinW == nil {
+ panic("no standard input configured")
+ }
+ r.stdinW.Close()
+}
+
+func (r *Runner) SendText(text string) {
+ if r.stdinW == nil {
+ panic("no standard input configured")
+ }
+ _, err := r.stdinW.Write([]byte(text + "\n"))
+ if err != nil {
+ panic("Failed to write to r.stdinW")
+ }
+}
+
+func (r *Runner) RunBackground() {
+ var stdoutR, stderrR io.Reader
+ var stdoutW, stderrW io.WriteCloser
+ stdoutR, stdoutW = io.Pipe()
+ stderrR, stderrW = io.Pipe()
+ ctx := cmdio.NewContext(r.ctx, &cmdio.Logger{
+ Mode: flags.ModeAppend,
+ Reader: bufio.Reader{},
+ Writer: stderrW,
+ })
+
+ cli := cmd.New(ctx)
+ cli.SetOut(stdoutW)
+ cli.SetErr(stderrW)
+ cli.SetArgs(r.args)
+ if r.stdinW != nil {
+ cli.SetIn(r.stdinR)
+ }
+
+ // Register cleanup function to restore flags to their original values
+ // once test has been executed. This is needed because flag values reside
+ // in a global singleton data-structure, and thus subsequent tests might
+ // otherwise interfere with each other.
+ r.registerFlagCleanup(cli)
+
+ errch := make(chan error)
+ ctx, cancel := context.WithCancel(ctx)
+
+ // Tee stdout/stderr to buffers.
+ stdoutR = io.TeeReader(stdoutR, &r.stdout)
+ stderrR = io.TeeReader(stderrR, &r.stderr)
+
+ // Consume stdout/stderr line-by-line.
+ var wg sync.WaitGroup
+ r.StdoutLines = consumeLines(ctx, &wg, stdoutR)
+ r.StderrLines = consumeLines(ctx, &wg, stderrR)
+
+ // Run command in background.
+ go func() {
+ err := root.Execute(ctx, cli)
+ if err != nil {
+ r.Logf("Error running command: %s", err)
+ }
+
+ // Close pipes to signal EOF.
+ stdoutW.Close()
+ stderrW.Close()
+
+ // Wait for the [consumeLines] routines to finish now that
+ // the pipes they're reading from have closed.
+ wg.Wait()
+
+ if r.stdout.Len() > 0 {
+ // Make a copy of the buffer such that it remains "unread".
+ scanner := bufio.NewScanner(bytes.NewBuffer(r.stdout.Bytes()))
+ for scanner.Scan() {
+ r.Logf("[databricks stdout]: %s", scanner.Text())
+ }
+ }
+
+ if r.stderr.Len() > 0 {
+ // Make a copy of the buffer such that it remains "unread".
+ scanner := bufio.NewScanner(bytes.NewBuffer(r.stderr.Bytes()))
+ for scanner.Scan() {
+ r.Logf("[databricks stderr]: %s", scanner.Text())
+ }
+ }
+
+ // Reset context on command for the next test.
+ // These commands are globals so we have to clean up to the best of our ability after each run.
+ // See https://github.com/spf13/cobra/blob/a6f198b635c4b18fff81930c40d464904e55b161/command.go#L1062-L1066
+ //nolint:staticcheck // cobra sets the context and doesn't clear it
+ cli.SetContext(nil)
+
+ // Make caller aware of error.
+ errch <- err
+ close(errch)
+ }()
+
+ // Ensure command terminates upon test completion (success or failure).
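+ // Cancelling the context signals the running command to stop; waiting on
+ // errch then ensures the background goroutine has fully exited.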
+ r.Cleanup(func() { + // Signal termination of command. + cancel() + // Wait for goroutine to finish. + <-errch + }) + + r.errch = errch +} + +func (r *Runner) Run() (bytes.Buffer, bytes.Buffer, error) { + r.RunBackground() + err := <-r.errch + return r.stdout, r.stderr, err +} + +// Like [require.Eventually] but errors if the underlying command has failed. +func (r *Runner) Eventually(condition func() bool, waitFor, tick time.Duration, msgAndArgs ...any) { + ch := make(chan bool, 1) + + timer := time.NewTimer(waitFor) + defer timer.Stop() + + ticker := time.NewTicker(tick) + defer ticker.Stop() + + // Kick off condition check immediately. + go func() { ch <- condition() }() + + for tick := ticker.C; ; { + select { + case err := <-r.errch: + require.Fail(r, "Command failed", err) + return + case <-timer.C: + require.Fail(r, "Condition never satisfied", msgAndArgs...) + return + case <-tick: + tick = nil + go func() { ch <- condition() }() + case v := <-ch: + if v { + return + } + tick = ticker.C + } + } +} + +func (r *Runner) RunAndExpectOutput(heredoc string) { + stdout, _, err := r.Run() + require.NoError(r, err) + require.Equal(r, cmdio.Heredoc(heredoc), strings.TrimSpace(stdout.String())) +} + +func (r *Runner) RunAndParseJSON(v any) { + stdout, _, err := r.Run() + require.NoError(r, err) + err = json.Unmarshal(stdout.Bytes(), &v) + require.NoError(r, err) +} + +func NewRunner(t testutil.TestingT, ctx context.Context, args ...string) *Runner { + return &Runner{ + TestingT: t, + + ctx: ctx, + args: args, + } +} + +func RequireSuccessfulRun(t testutil.TestingT, ctx context.Context, args ...string) (bytes.Buffer, bytes.Buffer) { + t.Logf("run args: [%s]", strings.Join(args, ", ")) + r := NewRunner(t, ctx, args...) + stdout, stderr, err := r.Run() + require.NoError(t, err) + return stdout, stderr +} + +func RequireErrorRun(t testutil.TestingT, ctx context.Context, args ...string) (bytes.Buffer, bytes.Buffer, error) { + r := NewRunner(t, ctx, args...) + stdout, stderr, err := r.Run() + require.Error(t, err) + return stdout, stderr, err +} diff --git a/internal/testutil/cloud.go b/internal/testutil/cloud.go index ba5b75ecf..4a8a89645 100644 --- a/internal/testutil/cloud.go +++ b/internal/testutil/cloud.go @@ -1,9 +1,5 @@ package testutil -import ( - "testing" -) - type Cloud int const ( @@ -13,7 +9,7 @@ const ( ) // Implement [Requirement]. -func (c Cloud) Verify(t *testing.T) { +func (c Cloud) Verify(t TestingT) { if c != GetCloud(t) { t.Skipf("Skipping %s-specific test", c) } @@ -32,7 +28,20 @@ func (c Cloud) String() string { } } -func GetCloud(t *testing.T) Cloud { +func (c Cloud) NodeTypeID() string { + switch c { + case AWS: + return "i3.xlarge" + case Azure: + return "Standard_DS4_v2" + case GCP: + return "n1-standard-4" + default: + return "unknown" + } +} + +func GetCloud(t TestingT) Cloud { env := GetEnvOrSkipTest(t, "CLOUD_ENV") switch env { case "aws": @@ -50,6 +59,6 @@ func GetCloud(t *testing.T) Cloud { return -1 } -func IsAWSCloud(t *testing.T) bool { +func IsAWSCloud(t TestingT) bool { return GetCloud(t) == AWS } diff --git a/internal/testutil/copy.go b/internal/testutil/copy.go index 21faece00..a521da3e3 100644 --- a/internal/testutil/copy.go +++ b/internal/testutil/copy.go @@ -5,14 +5,13 @@ import ( "io/fs" "os" "path/filepath" - "testing" "github.com/stretchr/testify/require" ) // CopyDirectory copies the contents of a directory to another directory. // The destination directory is created if it does not exist. 
-func CopyDirectory(t *testing.T, src, dst string) { +func CopyDirectory(t TestingT, src, dst string) { err := filepath.WalkDir(src, func(path string, d fs.DirEntry, err error) error { if err != nil { return err @@ -22,7 +21,7 @@ func CopyDirectory(t *testing.T, src, dst string) { require.NoError(t, err) if d.IsDir() { - return os.MkdirAll(filepath.Join(dst, rel), 0755) + return os.MkdirAll(filepath.Join(dst, rel), 0o755) } // Copy the file to the temporary directory diff --git a/internal/testutil/env.go b/internal/testutil/env.go index e1973ba82..10557c4e6 100644 --- a/internal/testutil/env.go +++ b/internal/testutil/env.go @@ -5,7 +5,6 @@ import ( "path/filepath" "runtime" "strings" - "testing" "github.com/stretchr/testify/require" ) @@ -13,7 +12,7 @@ import ( // CleanupEnvironment sets up a pristine environment containing only $PATH and $HOME. // The original environment is restored upon test completion. // Note: use of this function is incompatible with parallel execution. -func CleanupEnvironment(t *testing.T) { +func CleanupEnvironment(t TestingT) { // Restore environment when test finishes. environ := os.Environ() t.Cleanup(func() { @@ -39,18 +38,13 @@ func CleanupEnvironment(t *testing.T) { } } -// GetEnvOrSkipTest proceeds with test only with that env variable -func GetEnvOrSkipTest(t *testing.T, name string) string { - value := os.Getenv(name) - if value == "" { - t.Skipf("Environment variable %s is missing", name) - } - return value -} - // Changes into specified directory for the duration of the test. // Returns the current working directory. -func Chdir(t *testing.T, dir string) string { +func Chdir(t TestingT, dir string) string { + // Prevent parallel execution when changing the working directory. + // t.Setenv automatically fails if t.Parallel is set. + t.Setenv("DO_NOT_RUN_IN_PARALLEL", "true") + wd, err := os.Getwd() require.NoError(t, err) diff --git a/internal/testutil/file.go b/internal/testutil/file.go index ba2c3280e..538a3c20a 100644 --- a/internal/testutil/file.go +++ b/internal/testutil/file.go @@ -3,24 +3,23 @@ package testutil import ( "os" "path/filepath" - "testing" "github.com/stretchr/testify/require" ) -func TouchNotebook(t *testing.T, elems ...string) string { +func TouchNotebook(t TestingT, elems ...string) string { path := filepath.Join(elems...) - err := os.MkdirAll(filepath.Dir(path), 0755) + err := os.MkdirAll(filepath.Dir(path), 0o755) require.NoError(t, err) - err = os.WriteFile(path, []byte("# Databricks notebook source"), 0644) + err = os.WriteFile(path, []byte("# Databricks notebook source"), 0o644) require.NoError(t, err) return path } -func Touch(t *testing.T, elems ...string) string { +func Touch(t TestingT, elems ...string) string { path := filepath.Join(elems...) - err := os.MkdirAll(filepath.Dir(path), 0755) + err := os.MkdirAll(filepath.Dir(path), 0o755) require.NoError(t, err) f, err := os.Create(path) @@ -31,9 +30,9 @@ func Touch(t *testing.T, elems ...string) string { return path } -func WriteFile(t *testing.T, content string, elems ...string) string { - path := filepath.Join(elems...) - err := os.MkdirAll(filepath.Dir(path), 0755) +// WriteFile writes content to a file. +func WriteFile(t TestingT, path, content string) { + err := os.MkdirAll(filepath.Dir(path), 0o755) require.NoError(t, err) f, err := os.Create(path) @@ -44,5 +43,12 @@ func WriteFile(t *testing.T, content string, elems ...string) string { err = f.Close() require.NoError(t, err) - return path +} + +// ReadFile reads a file and returns its content as a string. 
+func ReadFile(t TestingT, path string) string { + b, err := os.ReadFile(path) + require.NoError(t, err) + + return string(b) } diff --git a/internal/acc/helpers.go b/internal/testutil/helpers.go similarity index 88% rename from internal/acc/helpers.go rename to internal/testutil/helpers.go index f98001346..69ed7595b 100644 --- a/internal/acc/helpers.go +++ b/internal/testutil/helpers.go @@ -1,15 +1,14 @@ -package acc +package testutil import ( "fmt" "math/rand" "os" "strings" - "testing" ) // GetEnvOrSkipTest proceeds with test only with that env variable. -func GetEnvOrSkipTest(t *testing.T, name string) string { +func GetEnvOrSkipTest(t TestingT, name string) string { value := os.Getenv(name) if value == "" { t.Skipf("Environment variable %s is missing", name) diff --git a/internal/testutil/interface.go b/internal/testutil/interface.go new file mode 100644 index 000000000..2c3004800 --- /dev/null +++ b/internal/testutil/interface.go @@ -0,0 +1,27 @@ +package testutil + +// TestingT is an interface wrapper around *testing.T that provides the methods +// that are used by the test package to convey information about test failures. +// +// We use an interface so we can wrap *testing.T and provide additional functionality. +type TestingT interface { + Log(args ...any) + Logf(format string, args ...any) + + Error(args ...any) + Errorf(format string, args ...any) + + Fatal(args ...any) + Fatalf(format string, args ...any) + + Skip(args ...any) + Skipf(format string, args ...any) + + FailNow() + + Cleanup(func()) + + Setenv(key, value string) + + TempDir() string +} diff --git a/internal/testutil/jdk.go b/internal/testutil/jdk.go index 05bd7d6d6..60fa439db 100644 --- a/internal/testutil/jdk.go +++ b/internal/testutil/jdk.go @@ -5,12 +5,11 @@ import ( "context" "os/exec" "strings" - "testing" "github.com/stretchr/testify/require" ) -func RequireJDK(t *testing.T, ctx context.Context, version string) { +func RequireJDK(t TestingT, ctx context.Context, version string) { var stderr bytes.Buffer cmd := exec.Command("javac", "-version") diff --git a/internal/testutil/requirement.go b/internal/testutil/requirement.go index 53855e0b5..e182b7518 100644 --- a/internal/testutil/requirement.go +++ b/internal/testutil/requirement.go @@ -1,18 +1,14 @@ package testutil -import ( - "testing" -) - // Requirement is the interface for test requirements. type Requirement interface { - Verify(t *testing.T) + Verify(t TestingT) } // Require should be called at the beginning of a test to ensure that all // requirements are met before running the test. // If any requirement is not met, the test will be skipped. -func Require(t *testing.T, requirements ...Requirement) { +func Require(t TestingT, requirements ...Requirement) { for _, r := range requirements { r.Verify(t) } diff --git a/internal/testutil/testutil_test.go b/internal/testutil/testutil_test.go new file mode 100644 index 000000000..d41374d55 --- /dev/null +++ b/internal/testutil/testutil_test.go @@ -0,0 +1,36 @@ +package testutil_test + +import ( + "go/parser" + "go/token" + "strings" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +// TestNoTestingImport checks that no file in the package imports the testing package. +// All exported functions must use the TestingT interface instead of *testing.T. 
+func TestNoTestingImport(t *testing.T) { + // Parse the package + fset := token.NewFileSet() + pkgs, err := parser.ParseDir(fset, ".", nil, parser.AllErrors) + require.NoError(t, err) + + // Iterate through the files in the package + for _, pkg := range pkgs { + for _, file := range pkg.Files { + // Skip test files + if strings.HasSuffix(fset.Position(file.Pos()).Filename, "_test.go") { + continue + } + // Check the imports of each file + for _, imp := range file.Imports { + if imp.Path.Value == `"testing"` { + assert.Fail(t, "File imports the testing package", "File %s imports the testing package", fset.Position(file.Pos()).Filename) + } + } + } + } +} diff --git a/libs/auth/callback.go b/libs/auth/callback.go index 5a2400697..3893a5041 100644 --- a/libs/auth/callback.go +++ b/libs/auth/callback.go @@ -53,7 +53,9 @@ func newCallback(ctx context.Context, a *PersistentAuth) (*callbackServer, error a: a, } cb.srv.Handler = cb - go cb.srv.Serve(cb.ln) + go func() { + _ = cb.srv.Serve(cb.ln) + }() return cb, nil } diff --git a/libs/cmdgroup/command_test.go b/libs/cmdgroup/command_test.go index f3e3fe6ab..2c248f09f 100644 --- a/libs/cmdgroup/command_test.go +++ b/libs/cmdgroup/command_test.go @@ -42,7 +42,8 @@ func TestCommandFlagGrouping(t *testing.T) { buf := bytes.NewBuffer(nil) cmd.SetOutput(buf) - cmd.Usage() + err := cmd.Usage() + require.NoError(t, err) expected := `Usage: parent test [flags] diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index 75c0c4b87..c0e9e868a 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -31,9 +31,9 @@ type cmdIO struct { err io.Writer } -func NewIO(outputFormat flags.Output, in io.Reader, out io.Writer, err io.Writer, headerTemplate, template string) *cmdIO { +func NewIO(ctx context.Context, outputFormat flags.Output, in io.Reader, out, err io.Writer, headerTemplate, template string) *cmdIO { // The check below is similar to color.NoColor but uses the specified err writer. 
- dumb := os.Getenv("NO_COLOR") != "" || os.Getenv("TERM") == "dumb" + dumb := env.Get(ctx, "NO_COLOR") != "" || env.Get(ctx, "TERM") == "dumb" if f, ok := err.(*os.File); ok && !dumb { dumb = !isatty.IsTerminal(f.Fd()) && !isatty.IsCygwinTerminal(f.Fd()) } diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index 45b1883ce..7bc95e9a5 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -151,7 +151,7 @@ func (l *Logger) AskSelect(question string, choices []string) (string, error) { return ans, nil } -func (l *Logger) Ask(question string, defaultVal string) (string, error) { +func (l *Logger) Ask(question, defaultVal string) (string, error) { if l.Mode == flags.ModeJson { return "", fmt.Errorf("question prompts are not supported in json mode") } @@ -188,29 +188,29 @@ func (l *Logger) writeJson(event Event) { // we panic because there we cannot catch this in jobs.RunNowAndWait panic(err) } - l.Writer.Write([]byte(b)) - l.Writer.Write([]byte("\n")) + _, _ = l.Writer.Write([]byte(b)) + _, _ = l.Writer.Write([]byte("\n")) } func (l *Logger) writeAppend(event Event) { - l.Writer.Write([]byte(event.String())) - l.Writer.Write([]byte("\n")) + _, _ = l.Writer.Write([]byte(event.String())) + _, _ = l.Writer.Write([]byte("\n")) } func (l *Logger) writeInplace(event Event) { if l.isFirstEvent { // save cursor location - l.Writer.Write([]byte("\033[s")) + _, _ = l.Writer.Write([]byte("\033[s")) } // move cursor to saved location - l.Writer.Write([]byte("\033[u")) + _, _ = l.Writer.Write([]byte("\033[u")) // clear from cursor to end of screen - l.Writer.Write([]byte("\033[0J")) + _, _ = l.Writer.Write([]byte("\033[0J")) - l.Writer.Write([]byte(event.String())) - l.Writer.Write([]byte("\n")) + _, _ = l.Writer.Write([]byte(event.String())) + _, _ = l.Writer.Write([]byte("\n")) l.isFirstEvent = false } @@ -234,5 +234,4 @@ func (l *Logger) Log(event Event) { // jobs.RunNowAndWait panic("unknown progress logger mode: " + l.Mode.String()) } - } diff --git a/libs/cmdio/render.go b/libs/cmdio/render.go index c68ddca0d..1529274a3 100644 --- a/libs/cmdio/render.go +++ b/libs/cmdio/render.go @@ -361,7 +361,9 @@ func renderUsingTemplate(ctx context.Context, r templateRenderer, w io.Writer, h if err != nil { return err } - tw.Write([]byte("\n")) + if _, err := tw.Write([]byte("\n")); err != nil { + return err + } // Do not flush here. Instead, allow the first 100 resources to determine the initial spacing of the header columns. 
} t, err := base.Parse(tmpl) diff --git a/libs/cmdio/render_test.go b/libs/cmdio/render_test.go index 6bde446c4..f26190a23 100644 --- a/libs/cmdio/render_test.go +++ b/libs/cmdio/render_test.go @@ -171,8 +171,9 @@ func TestRender(t *testing.T) { for _, c := range testCases { t.Run(c.name, func(t *testing.T) { output := &bytes.Buffer{} - cmdIO := NewIO(c.outputFormat, nil, output, output, c.headerTemplate, c.template) - ctx := InContext(context.Background(), cmdIO) + ctx := context.Background() + cmdIO := NewIO(ctx, c.outputFormat, nil, output, output, c.headerTemplate, c.template) + ctx = InContext(ctx, cmdIO) var err error if vv, ok := c.v.(listing.Iterator[*provisioning.Workspace]); ok { err = RenderIterator(ctx, vv) diff --git a/libs/databrickscfg/cfgpickers/clusters.go b/libs/databrickscfg/cfgpickers/clusters.go index cac1b08a7..6ae7d99c6 100644 --- a/libs/databrickscfg/cfgpickers/clusters.go +++ b/libs/databrickscfg/cfgpickers/clusters.go @@ -18,8 +18,10 @@ import ( var minUcRuntime = canonicalVersion("v12.0") -var dbrVersionRegex = regexp.MustCompile(`^(\d+\.\d+)\.x-.*`) -var dbrSnapshotVersionRegex = regexp.MustCompile(`^(\d+)\.x-snapshot.*`) +var ( + dbrVersionRegex = regexp.MustCompile(`^(\d+\.\d+)\.x-.*`) + dbrSnapshotVersionRegex = regexp.MustCompile(`^(\d+)\.x-snapshot.*`) +) func canonicalVersion(v string) string { return semver.Canonical("v" + strings.TrimPrefix(v, "v")) } diff --git a/libs/databrickscfg/cfgpickers/clusters_test.go b/libs/databrickscfg/cfgpickers/clusters_test.go index d17e86d4a..cde09aa44 100644 --- a/libs/databrickscfg/cfgpickers/clusters_test.go +++ b/libs/databrickscfg/cfgpickers/clusters_test.go @@ -115,7 +115,7 @@ func TestFirstCompatibleCluster(t *testing.T) { w := databricks.Must(databricks.NewWorkspaceClient((*databricks.Config)(cfg))) ctx := context.Background() - ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) + ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) clusterID, err := AskForCluster(ctx, w, WithDatabricksConnect("13.1")) require.NoError(t, err) require.Equal(t, "bcd-id", clusterID) @@ -162,7 +162,7 @@ func TestNoCompatibleClusters(t *testing.T) { w := databricks.Must(databricks.NewWorkspaceClient((*databricks.Config)(cfg))) ctx := context.Background() - ctx = cmdio.InContext(ctx, cmdio.NewIO(flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) + ctx = cmdio.InContext(ctx, cmdio.NewIO(ctx, flags.OutputText, &bytes.Buffer{}, &bytes.Buffer{}, &bytes.Buffer{}, "", "...")) _, err := AskForCluster(ctx, w, WithDatabricksConnect("13.1")) require.Equal(t, ErrNoCompatibleClusters, err) } diff --git a/libs/dyn/convert/from_typed.go b/libs/dyn/convert/from_typed.go index cd92ad0eb..ed1b85a36 100644 --- a/libs/dyn/convert/from_typed.go +++ b/libs/dyn/convert/from_typed.go @@ -126,7 +126,7 @@ func fromTypedStruct(src reflect.Value, ref dyn.Value, options ...fromTypedOptio // Either if the key was set in the reference or the field is not zero-valued, we include it. if ok || nv.Kind() != dyn.KindNil { - out.Set(refk, nv) + out.Set(refk, nv) //nolint:errcheck } } @@ -184,7 +184,7 @@ func fromTypedMap(src reflect.Value, ref dyn.Value) (dyn.Value, error) { // Every entry is represented, even if it is a nil. // Otherwise, a map with zero-valued structs would yield a nil as well.
- out.Set(refk, nv) + out.Set(refk, nv) //nolint:errcheck } return dyn.V(out), nil diff --git a/libs/dyn/convert/from_typed_test.go b/libs/dyn/convert/from_typed_test.go index 0cddff3be..8a05bfb38 100644 --- a/libs/dyn/convert/from_typed_test.go +++ b/libs/dyn/convert/from_typed_test.go @@ -325,7 +325,7 @@ func TestFromTypedMapNil(t *testing.T) { } func TestFromTypedMapEmpty(t *testing.T) { - var src = map[string]string{} + src := map[string]string{} ref := dyn.V(map[string]dyn.Value{ "foo": dyn.V("bar"), @@ -338,7 +338,7 @@ func TestFromTypedMapEmpty(t *testing.T) { } func TestFromTypedMapNonEmpty(t *testing.T) { - var src = map[string]string{ + src := map[string]string{ "foo": "foo", "bar": "bar", } @@ -353,7 +353,7 @@ func TestFromTypedMapNonEmpty(t *testing.T) { } func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { - var src = map[string]string{ + src := map[string]string{ "foo": "bar", "bar": "qux", } @@ -372,7 +372,7 @@ func TestFromTypedMapNonEmptyRetainLocation(t *testing.T) { } func TestFromTypedMapFieldWithZeroValue(t *testing.T) { - var src = map[string]string{ + src := map[string]string{ "foo": "", } @@ -398,7 +398,7 @@ func TestFromTypedSliceNil(t *testing.T) { } func TestFromTypedSliceEmpty(t *testing.T) { - var src = []string{} + src := []string{} ref := dyn.V([]dyn.Value{ dyn.V("bar"), @@ -411,7 +411,7 @@ func TestFromTypedSliceEmpty(t *testing.T) { } func TestFromTypedSliceNonEmpty(t *testing.T) { - var src = []string{ + src := []string{ "foo", "bar", } @@ -426,7 +426,7 @@ func TestFromTypedSliceNonEmpty(t *testing.T) { } func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { - var src = []string{ + src := []string{ "foo", "bar", } @@ -446,7 +446,7 @@ func TestFromTypedSliceNonEmptyRetainLocation(t *testing.T) { func TestFromTypedStringEmpty(t *testing.T) { var src string - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) @@ -454,7 +454,7 @@ func TestFromTypedStringEmpty(t *testing.T) { func TestFromTypedStringEmptyOverwrite(t *testing.T) { var src string - var ref = dyn.V("old") + ref := dyn.V("old") nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(""), nv) @@ -462,7 +462,7 @@ func TestFromTypedStringEmptyOverwrite(t *testing.T) { func TestFromTypedStringNonEmpty(t *testing.T) { var src string = "new" - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V("new"), nv) @@ -470,14 +470,14 @@ func TestFromTypedStringNonEmpty(t *testing.T) { func TestFromTypedStringNonEmptyOverwrite(t *testing.T) { var src string = "new" - var ref = dyn.V("old") + ref := dyn.V("old") nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V("new"), nv) } func TestFromTypedStringRetainsLocations(t *testing.T) { - var ref = dyn.NewValue("foo", []dyn.Location{{File: "foo"}}) + ref := dyn.NewValue("foo", []dyn.Location{{File: "foo"}}) // case: value has not been changed var src string = "foo" @@ -494,14 +494,14 @@ func TestFromTypedStringRetainsLocations(t *testing.T) { func TestFromTypedStringTypeError(t *testing.T) { var src string = "foo" - var ref = dyn.V(1234) + ref := dyn.V(1234) _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedBoolEmpty(t *testing.T) { var src bool - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) @@ -509,7 +509,7 @@ func TestFromTypedBoolEmpty(t 
*testing.T) { func TestFromTypedBoolEmptyOverwrite(t *testing.T) { var src bool - var ref = dyn.V(true) + ref := dyn.V(true) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(false), nv) @@ -517,7 +517,7 @@ func TestFromTypedBoolEmptyOverwrite(t *testing.T) { func TestFromTypedBoolNonEmpty(t *testing.T) { var src bool = true - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(true), nv) @@ -525,14 +525,14 @@ func TestFromTypedBoolNonEmpty(t *testing.T) { func TestFromTypedBoolNonEmptyOverwrite(t *testing.T) { var src bool = true - var ref = dyn.V(false) + ref := dyn.V(false) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(true), nv) } func TestFromTypedBoolRetainsLocations(t *testing.T) { - var ref = dyn.NewValue(true, []dyn.Location{{File: "foo"}}) + ref := dyn.NewValue(true, []dyn.Location{{File: "foo"}}) // case: value has not been changed var src bool = true @@ -549,7 +549,7 @@ func TestFromTypedBoolRetainsLocations(t *testing.T) { func TestFromTypedBoolVariableReference(t *testing.T) { var src bool = true - var ref = dyn.V("${var.foo}") + ref := dyn.V("${var.foo}") nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V("${var.foo}"), nv) @@ -557,14 +557,14 @@ func TestFromTypedBoolVariableReference(t *testing.T) { func TestFromTypedBoolTypeError(t *testing.T) { var src bool = true - var ref = dyn.V("string") + ref := dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedIntEmpty(t *testing.T) { var src int - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) @@ -572,7 +572,7 @@ func TestFromTypedIntEmpty(t *testing.T) { func TestFromTypedIntEmptyOverwrite(t *testing.T) { var src int - var ref = dyn.V(1234) + ref := dyn.V(1234) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(int64(0)), nv) @@ -580,7 +580,7 @@ func TestFromTypedIntEmptyOverwrite(t *testing.T) { func TestFromTypedIntNonEmpty(t *testing.T) { var src int = 1234 - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(int64(1234)), nv) @@ -588,14 +588,14 @@ func TestFromTypedIntNonEmpty(t *testing.T) { func TestFromTypedIntNonEmptyOverwrite(t *testing.T) { var src int = 1234 - var ref = dyn.V(1233) + ref := dyn.V(1233) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(int64(1234)), nv) } func TestFromTypedIntRetainsLocations(t *testing.T) { - var ref = dyn.NewValue(1234, []dyn.Location{{File: "foo"}}) + ref := dyn.NewValue(1234, []dyn.Location{{File: "foo"}}) // case: value has not been changed var src int = 1234 @@ -612,7 +612,7 @@ func TestFromTypedIntRetainsLocations(t *testing.T) { func TestFromTypedIntVariableReference(t *testing.T) { var src int = 1234 - var ref = dyn.V("${var.foo}") + ref := dyn.V("${var.foo}") nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V("${var.foo}"), nv) @@ -620,14 +620,14 @@ func TestFromTypedIntVariableReference(t *testing.T) { func TestFromTypedIntTypeError(t *testing.T) { var src int = 1234 - var ref = dyn.V("string") + ref := dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } func TestFromTypedFloatEmpty(t *testing.T) { var src float64 - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, 
dyn.NilValue, nv) @@ -635,7 +635,7 @@ func TestFromTypedFloatEmpty(t *testing.T) { func TestFromTypedFloatEmptyOverwrite(t *testing.T) { var src float64 - var ref = dyn.V(1.23) + ref := dyn.V(1.23) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(0.0), nv) @@ -643,7 +643,7 @@ func TestFromTypedFloatEmptyOverwrite(t *testing.T) { func TestFromTypedFloatNonEmpty(t *testing.T) { var src float64 = 1.23 - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(1.23), nv) @@ -651,7 +651,7 @@ func TestFromTypedFloatNonEmpty(t *testing.T) { func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { var src float64 = 1.23 - var ref = dyn.V(1.24) + ref := dyn.V(1.24) nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V(1.23), nv) @@ -659,7 +659,7 @@ func TestFromTypedFloatNonEmptyOverwrite(t *testing.T) { func TestFromTypedFloatRetainsLocations(t *testing.T) { var src float64 - var ref = dyn.NewValue(1.23, []dyn.Location{{File: "foo"}}) + ref := dyn.NewValue(1.23, []dyn.Location{{File: "foo"}}) // case: value has not been changed src = 1.23 @@ -676,7 +676,7 @@ func TestFromTypedFloatRetainsLocations(t *testing.T) { func TestFromTypedFloatVariableReference(t *testing.T) { var src float64 = 1.23 - var ref = dyn.V("${var.foo}") + ref := dyn.V("${var.foo}") nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.V("${var.foo}"), nv) @@ -684,7 +684,7 @@ func TestFromTypedFloatVariableReference(t *testing.T) { func TestFromTypedFloatTypeError(t *testing.T) { var src float64 = 1.23 - var ref = dyn.V("string") + ref := dyn.V("string") _, err := FromTyped(src, ref) require.Error(t, err) } @@ -727,7 +727,7 @@ func TestFromTypedAny(t *testing.T) { func TestFromTypedAnyNil(t *testing.T) { var src any = nil - var ref = dyn.NilValue + ref := dyn.NilValue nv, err := FromTyped(src, ref) require.NoError(t, err) assert.Equal(t, dyn.NilValue, nv) diff --git a/libs/dyn/convert/normalize.go b/libs/dyn/convert/normalize.go index 106add35d..31cd8b6e3 100644 --- a/libs/dyn/convert/normalize.go +++ b/libs/dyn/convert/normalize.go @@ -116,7 +116,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen } } - out.Set(pk, nv) + out.Set(pk, nv) //nolint:errcheck } // Return the normalized value if missing fields are not included. 
@@ -162,7 +162,7 @@ func (n normalizeOptions) normalizeStruct(typ reflect.Type, src dyn.Value, seen continue } if v.IsValid() { - out.Set(dyn.V(k), v) + out.Set(dyn.V(k), v) //nolint:errcheck } } @@ -201,7 +201,7 @@ func (n normalizeOptions) normalizeMap(typ reflect.Type, src dyn.Value, seen []r } } - out.Set(pk, nv) + out.Set(pk, nv) //nolint:errcheck } return dyn.NewValue(out, src.Locations()), diags diff --git a/libs/dyn/convert/normalize_test.go b/libs/dyn/convert/normalize_test.go index ab0a1cec1..449c09075 100644 --- a/libs/dyn/convert/normalize_test.go +++ b/libs/dyn/convert/normalize_test.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/libs/diag" "github.com/databricks/cli/libs/dyn" assert "github.com/databricks/cli/libs/dyn/dynassert" + "github.com/stretchr/testify/require" ) func TestNormalizeStruct(t *testing.T) { @@ -20,8 +21,8 @@ func TestNormalizeStruct(t *testing.T) { "bar": dyn.V("baz"), }) - vout, err := Normalize(typ, vin) - assert.Empty(t, err) + vout, diags := Normalize(typ, vin) + assert.Empty(t, diags) assert.Equal(t, vin, vout) } @@ -37,14 +38,14 @@ func TestNormalizeStructElementDiagnostic(t *testing.T) { "bar": dyn.V(map[string]dyn.Value{"an": dyn.V("error")}), }) - vout, err := Normalize(typ, vin) - assert.Len(t, err, 1) + vout, diags := Normalize(typ, vin) + assert.Len(t, diags, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `expected string, found map`, Locations: []dyn.Location{{}}, Paths: []dyn.Path{dyn.NewPath(dyn.Key("bar"))}, - }, err[0]) + }, diags[0]) // Elements that encounter an error during normalization are dropped. assert.Equal(t, map[string]any{ @@ -60,17 +61,20 @@ func TestNormalizeStructUnknownField(t *testing.T) { var typ Tmp m := dyn.NewMapping() - m.Set(dyn.V("foo"), dyn.V("val-foo")) + err := m.Set(dyn.V("foo"), dyn.V("val-foo")) + require.NoError(t, err) + // Set the unknown field, with location information. - m.Set(dyn.NewValue("bar", []dyn.Location{ + err = m.Set(dyn.NewValue("bar", []dyn.Location{ {File: "hello.yaml", Line: 1, Column: 1}, {File: "world.yaml", Line: 2, Column: 2}, }), dyn.V("var-bar")) + require.NoError(t, err) vin := dyn.V(m) - vout, err := Normalize(typ, vin) - assert.Len(t, err, 1) + vout, diags := Normalize(typ, vin) + assert.Len(t, diags, 1) assert.Equal(t, diag.Diagnostic{ Severity: diag.Warning, Summary: `unknown field: bar`, @@ -80,7 +84,7 @@ func TestNormalizeStructUnknownField(t *testing.T) { Locations: []dyn.Location{ {File: "hello.yaml", Line: 1, Column: 1}, {File: "world.yaml", Line: 2, Column: 2}, }, Paths: []dyn.Path{dyn.EmptyPath}, - }, err[0]) + }, diags[0]) // The field that can be mapped to the struct field is retained. assert.Equal(t, map[string]any{ diff --git a/libs/dyn/convert/struct_info.go b/libs/dyn/convert/struct_info.go index dc3ed4da4..f5fd29cb9 100644 --- a/libs/dyn/convert/struct_info.go +++ b/libs/dyn/convert/struct_info.go @@ -43,7 +43,7 @@ func getStructInfo(typ reflect.Type) structInfo { // buildStructInfo populates a new [structInfo] for the given type.
func buildStructInfo(typ reflect.Type) structInfo { - var out = structInfo{ + out := structInfo{ Fields: make(map[string][]int), } @@ -102,7 +102,7 @@ func buildStructInfo(typ reflect.Type) structInfo { } func (s *structInfo) FieldValues(v reflect.Value) map[string]reflect.Value { - var out = make(map[string]reflect.Value) + out := make(map[string]reflect.Value) for k, index := range s.Fields { fv := v diff --git a/libs/dyn/convert/struct_info_test.go b/libs/dyn/convert/struct_info_test.go index 20348ff60..bc10db9da 100644 --- a/libs/dyn/convert/struct_info_test.go +++ b/libs/dyn/convert/struct_info_test.go @@ -95,7 +95,7 @@ func TestStructInfoFieldValues(t *testing.T) { Bar string `json:"bar"` } - var src = Tmp{ + src := Tmp{ Foo: "foo", Bar: "bar", } @@ -121,7 +121,7 @@ func TestStructInfoFieldValuesAnonymousByValue(t *testing.T) { Foo } - var src = Tmp{ + src := Tmp{ Foo: Foo{ Foo: "foo", Bar: Bar{ diff --git a/libs/dyn/convert/to_typed_test.go b/libs/dyn/convert/to_typed_test.go index 78221c299..4a56dd4fc 100644 --- a/libs/dyn/convert/to_typed_test.go +++ b/libs/dyn/convert/to_typed_test.go @@ -44,7 +44,7 @@ func TestToTypedStructOverwrite(t *testing.T) { Qux string `json:"-"` } - var out = Tmp{ + out := Tmp{ Foo: "baz", Bar: "qux", } @@ -66,7 +66,7 @@ func TestToTypedStructClearFields(t *testing.T) { } // Struct value with non-empty fields. - var out = Tmp{ + out := Tmp{ Foo: "baz", Bar: "qux", } @@ -137,7 +137,7 @@ func TestToTypedStructNil(t *testing.T) { Foo string `json:"foo"` } - var out = Tmp{} + out := Tmp{} err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Equal(t, Tmp{}, out) @@ -148,7 +148,7 @@ func TestToTypedStructNilOverwrite(t *testing.T) { Foo string `json:"foo"` } - var out = Tmp{"bar"} + out := Tmp{"bar"} err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Equal(t, Tmp{}, out) @@ -173,7 +173,7 @@ func TestToTypedStructWithValueField(t *testing.T) { } func TestToTypedMap(t *testing.T) { - var out = map[string]string{} + out := map[string]string{} v := dyn.V(map[string]dyn.Value{ "key": dyn.V("value"), @@ -186,7 +186,7 @@ func TestToTypedMap(t *testing.T) { } func TestToTypedMapOverwrite(t *testing.T) { - var out = map[string]string{ + out := map[string]string{ "foo": "bar", } @@ -214,14 +214,14 @@ func TestToTypedMapWithPointerElement(t *testing.T) { } func TestToTypedMapNil(t *testing.T) { - var out = map[string]string{} + out := map[string]string{} err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) } func TestToTypedMapNilOverwrite(t *testing.T) { - var out = map[string]string{ + out := map[string]string{ "foo": "bar", } err := ToTyped(&out, dyn.NilValue) @@ -245,7 +245,7 @@ func TestToTypedSlice(t *testing.T) { } func TestToTypedSliceOverwrite(t *testing.T) { - var out = []string{"qux"} + out := []string{"qux"} v := dyn.V([]dyn.Value{ dyn.V("foo"), @@ -282,7 +282,7 @@ func TestToTypedSliceNil(t *testing.T) { } func TestToTypedSliceNilOverwrite(t *testing.T) { - var out = []string{"foo"} + out := []string{"foo"} err := ToTyped(&out, dyn.NilValue) require.NoError(t, err) assert.Nil(t, out) diff --git a/libs/dyn/dynassert/assert.go b/libs/dyn/dynassert/assert.go index ebdba1214..616a588ec 100644 --- a/libs/dyn/dynassert/assert.go +++ b/libs/dyn/dynassert/assert.go @@ -5,7 +5,7 @@ import ( "github.com/stretchr/testify/assert" ) -func Equal(t assert.TestingT, expected any, actual any, msgAndArgs ...any) bool { +func Equal(t assert.TestingT, expected, actual any, msgAndArgs ...any) bool { ev, eok := 
expected.(dyn.Value) av, aok := actual.(dyn.Value) if eok && aok && ev.IsValid() && av.IsValid() { @@ -36,7 +36,7 @@ func EqualValues(t assert.TestingT, expected, actual any, msgAndArgs ...any) boo return assert.EqualValues(t, expected, actual, msgAndArgs...) } -func NotEqual(t assert.TestingT, expected any, actual any, msgAndArgs ...any) bool { +func NotEqual(t assert.TestingT, expected, actual any, msgAndArgs ...any) bool { return assert.NotEqual(t, expected, actual, msgAndArgs...) } @@ -84,11 +84,11 @@ func False(t assert.TestingT, value bool, msgAndArgs ...any) bool { return assert.False(t, value, msgAndArgs...) } -func Contains(t assert.TestingT, list any, element any, msgAndArgs ...any) bool { +func Contains(t assert.TestingT, list, element any, msgAndArgs ...any) bool { return assert.Contains(t, list, element, msgAndArgs...) } -func NotContains(t assert.TestingT, list any, element any, msgAndArgs ...any) bool { +func NotContains(t assert.TestingT, list, element any, msgAndArgs ...any) bool { return assert.NotContains(t, list, element, msgAndArgs...) } @@ -112,6 +112,6 @@ func NotPanics(t assert.TestingT, f func(), msgAndArgs ...any) bool { return assert.NotPanics(t, f, msgAndArgs...) } -func JSONEq(t assert.TestingT, expected string, actual string, msgAndArgs ...any) bool { +func JSONEq(t assert.TestingT, expected, actual string, msgAndArgs ...any) bool { return assert.JSONEq(t, expected, actual, msgAndArgs...) } diff --git a/libs/dyn/dynassert/assert_test.go b/libs/dyn/dynassert/assert_test.go index 43258bd20..c8c2d6960 100644 --- a/libs/dyn/dynassert/assert_test.go +++ b/libs/dyn/dynassert/assert_test.go @@ -13,7 +13,7 @@ import ( ) func TestThatThisTestPackageIsUsed(t *testing.T) { - var base = ".." + base := ".." var files []string err := fs.WalkDir(os.DirFS(base), ".", func(path string, d fs.DirEntry, err error) error { if d.IsDir() { diff --git a/libs/dyn/jsonloader/json.go b/libs/dyn/jsonloader/json.go index cbf539263..3f2dc859f 100644 --- a/libs/dyn/jsonloader/json.go +++ b/libs/dyn/jsonloader/json.go @@ -70,7 +70,7 @@ func decodeValue(decoder *json.Decoder, o *Offset) (dyn.Value, error) { return invalidValueWithLocation(decoder, o), err } - obj.Set(keyVal, val) + obj.Set(keyVal, val) //nolint:errcheck } // Consume the closing '}' if _, err := decoder.Token(); err != nil { diff --git a/libs/dyn/jsonsaver/marshal_test.go b/libs/dyn/jsonsaver/marshal_test.go index 0b6a34283..e8897ea49 100644 --- a/libs/dyn/jsonsaver/marshal_test.go +++ b/libs/dyn/jsonsaver/marshal_test.go @@ -5,6 +5,7 @@ import ( "github.com/databricks/cli/libs/dyn" assert "github.com/databricks/cli/libs/dyn/dynassert" + "github.com/stretchr/testify/require" ) func TestMarshal_String(t *testing.T) { @@ -44,8 +45,8 @@ func TestMarshal_Time(t *testing.T) { func TestMarshal_Map(t *testing.T) { m := dyn.NewMapping() - m.Set(dyn.V("key1"), dyn.V("value1")) - m.Set(dyn.V("key2"), dyn.V("value2")) + require.NoError(t, m.Set(dyn.V("key1"), dyn.V("value1"))) + require.NoError(t, m.Set(dyn.V("key2"), dyn.V("value2"))) b, err := Marshal(dyn.V(m)) if assert.NoError(t, err) { @@ -66,16 +67,16 @@ func TestMarshal_Sequence(t *testing.T) { func TestMarshal_Complex(t *testing.T) { map1 := dyn.NewMapping() - map1.Set(dyn.V("str1"), dyn.V("value1")) - map1.Set(dyn.V("str2"), dyn.V("value2")) + require.NoError(t, map1.Set(dyn.V("str1"), dyn.V("value1"))) + require.NoError(t, map1.Set(dyn.V("str2"), dyn.V("value2"))) seq1 := []dyn.Value{} seq1 = append(seq1, dyn.V("value1")) seq1 = append(seq1, dyn.V("value2")) root := 
dyn.NewMapping() - root.Set(dyn.V("map1"), dyn.V(map1)) - root.Set(dyn.V("seq1"), dyn.V(seq1)) + require.NoError(t, root.Set(dyn.V("map1"), dyn.V(map1))) + require.NoError(t, root.Set(dyn.V("seq1"), dyn.V(seq1))) // Marshal without indent. b, err := Marshal(dyn.V(root)) diff --git a/libs/dyn/mapping.go b/libs/dyn/mapping.go index f9f2d2e97..3c7c4e96e 100644 --- a/libs/dyn/mapping.go +++ b/libs/dyn/mapping.go @@ -41,7 +41,7 @@ func newMappingWithSize(size int) Mapping { func newMappingFromGoMap(vin map[string]Value) Mapping { m := newMappingWithSize(len(vin)) for k, v := range vin { - m.Set(V(k), v) + m.Set(V(k), v) //nolint:errcheck } return m } @@ -94,7 +94,7 @@ func (m *Mapping) GetByString(skey string) (Value, bool) { // If the key already exists, the value is updated. // If the key does not exist, a new key-value pair is added. // The key must be a string, otherwise an error is returned. -func (m *Mapping) Set(key Value, value Value) error { +func (m *Mapping) Set(key, value Value) error { skey, ok := key.AsString() if !ok { return fmt.Errorf("key must be a string, got %s", key.Kind()) @@ -144,6 +144,6 @@ func (m Mapping) Clone() Mapping { // Merge merges the key-value pairs from another Mapping into the current Mapping. func (m *Mapping) Merge(n Mapping) { for _, p := range n.pairs { - m.Set(p.Key, p.Value) + m.Set(p.Key, p.Value) //nolint:errcheck } } diff --git a/libs/dyn/merge/merge.go b/libs/dyn/merge/merge.go index 29decd779..72d9a7d28 100644 --- a/libs/dyn/merge/merge.go +++ b/libs/dyn/merge/merge.go @@ -88,10 +88,10 @@ func mergeMap(a, b dyn.Value) (dyn.Value, error) { if err != nil { return dyn.InvalidValue, err } - out.Set(pk, merged) + out.Set(pk, merged) //nolint:errcheck } else { // Otherwise, just set the value. - out.Set(pk, pv) + out.Set(pk, pv) //nolint:errcheck } } @@ -111,6 +111,7 @@ func mergeSequence(a, b dyn.Value) (dyn.Value, error) { // Preserve the location of the first value. Accumulate the locations of the second value. return dyn.NewValue(out, a.Locations()).AppendLocationsFromValue(b), nil } + func mergePrimitive(a, b dyn.Value) (dyn.Value, error) { // Merging primitive values means using the incoming value. return b.AppendLocationsFromValue(a), nil diff --git a/libs/dyn/merge/merge_test.go b/libs/dyn/merge/merge_test.go index 4a4bf9e6c..bfe772016 100644 --- a/libs/dyn/merge/merge_test.go +++ b/libs/dyn/merge/merge_test.go @@ -75,7 +75,6 @@ func TestMergeMaps(t *testing.T) { assert.Equal(t, l1, out.Get("foo").Location()) assert.Equal(t, l2, out.Get("qux").Location()) } - } func TestMergeMapsNil(t *testing.T) { diff --git a/libs/dyn/merge/override.go b/libs/dyn/merge/override.go index 7a8667cd6..ca62c7305 100644 --- a/libs/dyn/merge/override.go +++ b/libs/dyn/merge/override.go @@ -23,7 +23,7 @@ import ( type OverrideVisitor struct { VisitDelete func(valuePath dyn.Path, left dyn.Value) error VisitInsert func(valuePath dyn.Path, right dyn.Value) (dyn.Value, error) - VisitUpdate func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) + VisitUpdate func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) } var ErrOverrideUndoDelete = errors.New("undo delete operation") @@ -31,11 +31,11 @@ var ErrOverrideUndoDelete = errors.New("undo delete operation") // Override overrides value 'leftRoot' with 'rightRoot', keeping 'location' if values // haven't changed. Preserving 'location' is important to preserve the original source of the value // for error reporting. 
-func Override(leftRoot dyn.Value, rightRoot dyn.Value, visitor OverrideVisitor) (dyn.Value, error) { +func Override(leftRoot, rightRoot dyn.Value, visitor OverrideVisitor) (dyn.Value, error) { return override(dyn.EmptyPath, leftRoot, rightRoot, visitor) } -func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor OverrideVisitor) (dyn.Value, error) { +func override(basePath dyn.Path, left, right dyn.Value, visitor OverrideVisitor) (dyn.Value, error) { if left.Kind() != right.Kind() { return visitor.VisitUpdate(basePath, left, right) } @@ -46,7 +46,6 @@ func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor Overri switch left.Kind() { case dyn.KindMap: merged, err := overrideMapping(basePath, left.MustMap(), right.MustMap(), visitor) - if err != nil { return dyn.InvalidValue, err } @@ -57,7 +56,6 @@ func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor Overri // some sequences are keyed, and we can detect which elements are added/removed/updated, // but we don't have this information merged, err := overrideSequence(basePath, left.MustSequence(), right.MustSequence(), visitor) - if err != nil { return dyn.InvalidValue, err } @@ -107,7 +105,7 @@ func override(basePath dyn.Path, left dyn.Value, right dyn.Value, visitor Overri return dyn.InvalidValue, fmt.Errorf("unexpected kind %s at %s", left.Kind(), basePath.String()) } -func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dyn.Mapping, visitor OverrideVisitor) (dyn.Mapping, error) { +func overrideMapping(basePath dyn.Path, leftMapping, rightMapping dyn.Mapping, visitor OverrideVisitor) (dyn.Mapping, error) { out := dyn.NewMapping() for _, leftPair := range leftMapping.Pairs() { @@ -136,14 +134,12 @@ func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dy if leftPair, ok := leftMapping.GetPair(rightPair.Key); ok { path := basePath.Append(dyn.Key(rightPair.Key.MustString())) newValue, err := override(path, leftPair.Value, rightPair.Value, visitor) - if err != nil { return dyn.NewMapping(), err } // key was there before, so keep its location err = out.Set(leftPair.Key, newValue) - if err != nil { return dyn.NewMapping(), err } @@ -151,13 +147,11 @@ func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dy path := basePath.Append(dyn.Key(rightPair.Key.MustString())) newValue, err := visitor.VisitInsert(path, rightPair.Value) - if err != nil { return dyn.NewMapping(), err } err = out.Set(rightPair.Key, newValue) - if err != nil { return dyn.NewMapping(), err } @@ -167,14 +161,13 @@ func overrideMapping(basePath dyn.Path, leftMapping dyn.Mapping, rightMapping dy return out, nil } -func overrideSequence(basePath dyn.Path, left []dyn.Value, right []dyn.Value, visitor OverrideVisitor) ([]dyn.Value, error) { +func overrideSequence(basePath dyn.Path, left, right []dyn.Value, visitor OverrideVisitor) ([]dyn.Value, error) { minLen := min(len(left), len(right)) var values []dyn.Value for i := 0; i < minLen; i++ { path := basePath.Append(dyn.Index(i)) merged, err := override(path, left[i], right[i], visitor) - if err != nil { return nil, err } @@ -186,7 +179,6 @@ func overrideSequence(basePath dyn.Path, left []dyn.Value, right []dyn.Value, vi for i := minLen; i < len(right); i++ { path := basePath.Append(dyn.Index(i)) newValue, err := visitor.VisitInsert(path, right[i]) - if err != nil { return nil, err } diff --git a/libs/dyn/merge/override_test.go b/libs/dyn/merge/override_test.go index 264c32e5e..ea161d27c 100644 --- 
a/libs/dyn/merge/override_test.go +++ b/libs/dyn/merge/override_test.go @@ -432,10 +432,12 @@ func TestOverride_PreserveMappingKeys(t *testing.T) { rightValueLocation := dyn.Location{File: "right.yml", Line: 3, Column: 1} left := dyn.NewMapping() - left.Set(dyn.NewValue("a", []dyn.Location{leftKeyLocation}), dyn.NewValue(42, []dyn.Location{leftValueLocation})) + err := left.Set(dyn.NewValue("a", []dyn.Location{leftKeyLocation}), dyn.NewValue(42, []dyn.Location{leftValueLocation})) + require.NoError(t, err) right := dyn.NewMapping() - right.Set(dyn.NewValue("a", []dyn.Location{rightKeyLocation}), dyn.NewValue(7, []dyn.Location{rightValueLocation})) + err = right.Set(dyn.NewValue("a", []dyn.Location{rightKeyLocation}), dyn.NewValue(7, []dyn.Location{rightValueLocation})) + require.NoError(t, err) state, visitor := createVisitor(visitorOpts{}) @@ -482,7 +484,7 @@ func createVisitor(opts visitorOpts) (*visitorState, OverrideVisitor) { s := visitorState{} return &s, OverrideVisitor{ - VisitUpdate: func(valuePath dyn.Path, left dyn.Value, right dyn.Value) (dyn.Value, error) { + VisitUpdate: func(valuePath dyn.Path, left, right dyn.Value) (dyn.Value, error) { s.updated = append(s.updated, valuePath.String()) if opts.error != nil { diff --git a/libs/dyn/pattern.go b/libs/dyn/pattern.go index aecdc3ca6..2d2e9cae7 100644 --- a/libs/dyn/pattern.go +++ b/libs/dyn/pattern.go @@ -69,7 +69,7 @@ func (c anyKeyComponent) visit(v Value, prefix Path, suffix Pattern, opts visitO return InvalidValue, err } - m.Set(pk, nv) + m.Set(pk, nv) //nolint:errcheck } return NewValue(m, v.Locations()), nil diff --git a/libs/dyn/value_test.go b/libs/dyn/value_test.go index 6a0a27b8d..86e65858e 100644 --- a/libs/dyn/value_test.go +++ b/libs/dyn/value_test.go @@ -25,11 +25,11 @@ func TestValueAsMap(t *testing.T) { _, ok := zeroValue.AsMap() assert.False(t, ok) - var intValue = dyn.V(1) + intValue := dyn.V(1) _, ok = intValue.AsMap() assert.False(t, ok) - var mapValue = dyn.NewValue( + mapValue := dyn.NewValue( map[string]dyn.Value{ "key": dyn.NewValue( "value", @@ -46,6 +46,6 @@ func TestValueAsMap(t *testing.T) { func TestValueIsValid(t *testing.T) { var zeroValue dyn.Value assert.False(t, zeroValue.IsValid()) - var intValue = dyn.V(1) + intValue := dyn.V(1) assert.True(t, intValue.IsValid()) } diff --git a/libs/dyn/visit.go b/libs/dyn/visit.go index 38adec24f..95515115e 100644 --- a/libs/dyn/visit.go +++ b/libs/dyn/visit.go @@ -122,7 +122,7 @@ func (component pathComponent) visit(v Value, prefix Path, suffix Pattern, opts // Return an updated map value. m = m.Clone() - m.Set(V(component.key), nv) + m.Set(V(component.key), nv) //nolint:errcheck return Value{ v: m, k: KindMap, diff --git a/libs/dyn/visit_map.go b/libs/dyn/visit_map.go index 3f0cded03..db4526038 100644 --- a/libs/dyn/visit_map.go +++ b/libs/dyn/visit_map.go @@ -25,7 +25,7 @@ func Foreach(fn MapFunc) MapFunc { if err != nil { return InvalidValue, err } - m.Set(pk, nv) + m.Set(pk, nv) //nolint:errcheck } return NewValue(m, v.Locations()), nil case KindSequence: diff --git a/libs/dyn/visit_map_test.go b/libs/dyn/visit_map_test.go index 2cea0913b..d62327d6f 100644 --- a/libs/dyn/visit_map_test.go +++ b/libs/dyn/visit_map_test.go @@ -71,7 +71,7 @@ func TestMapFuncOnMap(t *testing.T) { }, vbar.AsAny()) // Return error from map function. 
- var ref = fmt.Errorf("error") + ref := fmt.Errorf("error") verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Key("foo")), func(_ dyn.Path, v dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, ref }) @@ -137,7 +137,7 @@ func TestMapFuncOnSequence(t *testing.T) { assert.Equal(t, []any{42, 45}, v1.AsAny()) // Return error from map function. - var ref = fmt.Errorf("error") + ref := fmt.Errorf("error") verr, err := dyn.MapByPath(vin, dyn.NewPath(dyn.Index(0)), func(_ dyn.Path, v dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, ref }) @@ -211,7 +211,7 @@ func TestMapForeachOnMapError(t *testing.T) { }) // Check that an error from the map function propagates. - var ref = fmt.Errorf("error") + ref := fmt.Errorf("error") _, err := dyn.Map(vin, ".", dyn.Foreach(func(_ dyn.Path, v dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, ref })) @@ -255,7 +255,7 @@ func TestMapForeachOnSequenceError(t *testing.T) { }) // Check that an error from the map function propagates. - var ref = fmt.Errorf("error") + ref := fmt.Errorf("error") _, err := dyn.Map(vin, ".", dyn.Foreach(func(_ dyn.Path, v dyn.Value) (dyn.Value, error) { return dyn.InvalidValue, ref })) diff --git a/libs/dyn/visit_set.go b/libs/dyn/visit_set.go index b086fb8a9..9991d311f 100644 --- a/libs/dyn/visit_set.go +++ b/libs/dyn/visit_set.go @@ -41,7 +41,7 @@ func SetByPath(v Value, p Path, nv Value) (Value, error) { // Return an updated map value. m = m.Clone() - m.Set(V(component.key), nv) + m.Set(V(component.key), nv) //nolint:errcheck return Value{ v: m, k: KindMap, diff --git a/libs/dyn/walk.go b/libs/dyn/walk.go index c51a11e22..b3576e088 100644 --- a/libs/dyn/walk.go +++ b/libs/dyn/walk.go @@ -45,7 +45,7 @@ func walk(v Value, p Path, fn func(p Path, v Value) (Value, error)) (Value, erro if err != nil { return InvalidValue, err } - out.Set(pk, nv) + out.Set(pk, nv) //nolint:errcheck } v.v = out case KindSequence: diff --git a/libs/dyn/yamlloader/loader.go b/libs/dyn/yamlloader/loader.go index a77ee0744..fe58d6dfb 100644 --- a/libs/dyn/yamlloader/loader.go +++ b/libs/dyn/yamlloader/loader.go @@ -129,7 +129,7 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro return dyn.InvalidValue, err } - acc.Set(k, v) + acc.Set(k, v) //nolint:errcheck } if merge == nil { @@ -137,8 +137,8 @@ func (d *loader) loadMapping(node *yaml.Node, loc dyn.Location) (dyn.Value, erro } // Build location for the merge node. - var mloc = d.location(merge) - var merr = errorf(mloc, "map merge requires map or sequence of maps as the value") + mloc := d.location(merge) + merr := errorf(mloc, "map merge requires map or sequence of maps as the value") // Flatten the merge node into a slice of nodes. // It can be either a single node or a sequence of nodes. 
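Reviewer note on the //nolint:errcheck suppressions that recur through the libs/dyn hunks above: per the mapping.go hunk in this patch, (*Mapping).Set returns an error only when the key is not a string, so call sites whose keys come from string literals or from the keys of an existing mapping cannot hit the error path, and an inline suppression is clearer than silently discarding a real failure. A minimal sketch of that contract, assuming it is compiled inside this module so the github.com/databricks/cli/libs/dyn import resolves:

    package main

    import (
    	"fmt"

    	"github.com/databricks/cli/libs/dyn"
    )

    func main() {
    	m := dyn.NewMapping()

    	// A string-literal key cannot trigger Set's only error path,
    	// which is why the call sites above suppress errcheck inline.
    	m.Set(dyn.V("host"), dyn.V("https://example.com")) //nolint:errcheck

    	// A non-string key is the one case Set rejects.
    	if err := m.Set(dyn.V(42), dyn.V("oops")); err != nil {
    		fmt.Println(err)
    	}
    }
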
diff --git a/libs/dyn/yamlloader/yaml_spec_test.go b/libs/dyn/yamlloader/yaml_spec_test.go index 2a5ae817f..d9997f702 100644 --- a/libs/dyn/yamlloader/yaml_spec_test.go +++ b/libs/dyn/yamlloader/yaml_spec_test.go @@ -777,7 +777,8 @@ func TestYAMLSpecExample_2_27(t *testing.T) { ), }, []dyn.Location{{File: file, Line: 22, Column: 3}}, - )}, + ), + }, []dyn.Location{{File: file, Line: 18, Column: 1}}, ), "tax": dyn.NewValue( diff --git a/libs/dyn/yamlsaver/saver.go b/libs/dyn/yamlsaver/saver.go index 0fd81d534..7398e2594 100644 --- a/libs/dyn/yamlsaver/saver.go +++ b/libs/dyn/yamlsaver/saver.go @@ -27,7 +27,7 @@ func NewSaverWithStyle(nodesWithStyle map[string]yaml.Style) *saver { } func (s *saver) SaveAsYAML(data any, filename string, force bool) error { - err := os.MkdirAll(filepath.Dir(filename), 0755) + err := os.MkdirAll(filepath.Dir(filename), 0o755) if err != nil { return err } diff --git a/libs/dyn/yamlsaver/saver_test.go b/libs/dyn/yamlsaver/saver_test.go index aa481c20b..89bd5c31e 100644 --- a/libs/dyn/yamlsaver/saver_test.go +++ b/libs/dyn/yamlsaver/saver_test.go @@ -11,7 +11,7 @@ import ( func TestMarshalNilValue(t *testing.T) { s := NewSaver() - var nilValue = dyn.NilValue + nilValue := dyn.NilValue v, err := s.toYamlNode(nilValue) assert.NoError(t, err) assert.Equal(t, "null", v.Value) @@ -19,7 +19,7 @@ func TestMarshalNilValue(t *testing.T) { func TestMarshalIntValue(t *testing.T) { s := NewSaver() - var intValue = dyn.V(1) + intValue := dyn.V(1) v, err := s.toYamlNode(intValue) assert.NoError(t, err) assert.Equal(t, "1", v.Value) @@ -28,7 +28,7 @@ func TestMarshalIntValue(t *testing.T) { func TestMarshalFloatValue(t *testing.T) { s := NewSaver() - var floatValue = dyn.V(1.0) + floatValue := dyn.V(1.0) v, err := s.toYamlNode(floatValue) assert.NoError(t, err) assert.Equal(t, "1", v.Value) @@ -37,7 +37,7 @@ func TestMarshalFloatValue(t *testing.T) { func TestMarshalBoolValue(t *testing.T) { s := NewSaver() - var boolValue = dyn.V(true) + boolValue := dyn.V(true) v, err := s.toYamlNode(boolValue) assert.NoError(t, err) assert.Equal(t, "true", v.Value) @@ -49,7 +49,7 @@ func TestMarshalTimeValue(t *testing.T) { require.NoError(t, err) s := NewSaver() - var timeValue = dyn.V(tm) + timeValue := dyn.V(tm) v, err := s.toYamlNode(timeValue) assert.NoError(t, err) assert.Equal(t, "1970-01-01", v.Value) @@ -58,7 +58,7 @@ func TestMarshalTimeValue(t *testing.T) { func TestMarshalSequenceValue(t *testing.T) { s := NewSaver() - var sequenceValue = dyn.NewValue( + sequenceValue := dyn.NewValue( []dyn.Value{ dyn.NewValue("value1", []dyn.Location{{File: "file", Line: 1, Column: 2}}), dyn.NewValue("value2", []dyn.Location{{File: "file", Line: 2, Column: 2}}), @@ -74,7 +74,7 @@ func TestMarshalSequenceValue(t *testing.T) { func TestMarshalStringValue(t *testing.T) { s := NewSaver() - var stringValue = dyn.V("value") + stringValue := dyn.V("value") v, err := s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "value", v.Value) @@ -83,7 +83,7 @@ func TestMarshalStringValue(t *testing.T) { func TestMarshalMapValue(t *testing.T) { s := NewSaver() - var mapValue = dyn.NewValue( + mapValue := dyn.NewValue( map[string]dyn.Value{ "key3": dyn.NewValue("value3", []dyn.Location{{File: "file", Line: 3, Column: 2}}), "key2": dyn.NewValue("value2", []dyn.Location{{File: "file", Line: 2, Column: 2}}), @@ -107,7 +107,7 @@ func TestMarshalMapValue(t *testing.T) { func TestMarshalNestedValues(t *testing.T) { s := NewSaver() - var mapValue = dyn.NewValue( + mapValue := dyn.NewValue( 
map[string]dyn.Value{ "key1": dyn.NewValue( map[string]dyn.Value{ @@ -129,14 +129,14 @@ func TestMarshalNestedValues(t *testing.T) { func TestMarshalHexadecimalValueIsQuoted(t *testing.T) { s := NewSaver() - var hexValue = dyn.V(0x123) + hexValue := dyn.V(0x123) v, err := s.toYamlNode(hexValue) assert.NoError(t, err) assert.Equal(t, "291", v.Value) assert.Equal(t, yaml.Style(0), v.Style) assert.Equal(t, yaml.ScalarNode, v.Kind) - var stringValue = dyn.V("0x123") + stringValue := dyn.V("0x123") v, err = s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "0x123", v.Value) @@ -146,14 +146,14 @@ func TestMarshalHexadecimalValueIsQuoted(t *testing.T) { func TestMarshalBinaryValueIsQuoted(t *testing.T) { s := NewSaver() - var binaryValue = dyn.V(0b101) + binaryValue := dyn.V(0b101) v, err := s.toYamlNode(binaryValue) assert.NoError(t, err) assert.Equal(t, "5", v.Value) assert.Equal(t, yaml.Style(0), v.Style) assert.Equal(t, yaml.ScalarNode, v.Kind) - var stringValue = dyn.V("0b101") + stringValue := dyn.V("0b101") v, err = s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "0b101", v.Value) @@ -163,14 +163,14 @@ func TestMarshalBinaryValueIsQuoted(t *testing.T) { func TestMarshalOctalValueIsQuoted(t *testing.T) { s := NewSaver() - var octalValue = dyn.V(0123) + octalValue := dyn.V(0o123) v, err := s.toYamlNode(octalValue) assert.NoError(t, err) assert.Equal(t, "83", v.Value) assert.Equal(t, yaml.Style(0), v.Style) assert.Equal(t, yaml.ScalarNode, v.Kind) - var stringValue = dyn.V("0123") + stringValue := dyn.V("0123") v, err = s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "0123", v.Value) @@ -180,14 +180,14 @@ func TestMarshalOctalValueIsQuoted(t *testing.T) { func TestMarshalFloatValueIsQuoted(t *testing.T) { s := NewSaver() - var floatValue = dyn.V(1.0) + floatValue := dyn.V(1.0) v, err := s.toYamlNode(floatValue) assert.NoError(t, err) assert.Equal(t, "1", v.Value) assert.Equal(t, yaml.Style(0), v.Style) assert.Equal(t, yaml.ScalarNode, v.Kind) - var stringValue = dyn.V("1.0") + stringValue := dyn.V("1.0") v, err = s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "1.0", v.Value) @@ -197,14 +197,14 @@ func TestMarshalFloatValueIsQuoted(t *testing.T) { func TestMarshalBoolValueIsQuoted(t *testing.T) { s := NewSaver() - var boolValue = dyn.V(true) + boolValue := dyn.V(true) v, err := s.toYamlNode(boolValue) assert.NoError(t, err) assert.Equal(t, "true", v.Value) assert.Equal(t, yaml.Style(0), v.Style) assert.Equal(t, yaml.ScalarNode, v.Kind) - var stringValue = dyn.V("true") + stringValue := dyn.V("true") v, err = s.toYamlNode(stringValue) assert.NoError(t, err) assert.Equal(t, "true", v.Value) @@ -217,7 +217,7 @@ func TestCustomStylingWithNestedMap(t *testing.T) { "styled": yaml.DoubleQuotedStyle, }) - var styledMap = dyn.NewValue( + styledMap := dyn.NewValue( map[string]dyn.Value{ "key1": dyn.NewValue("value1", []dyn.Location{{File: "file", Line: 1, Column: 2}}), "key2": dyn.NewValue("value2", []dyn.Location{{File: "file", Line: 2, Column: 2}}), @@ -225,7 +225,7 @@ func TestCustomStylingWithNestedMap(t *testing.T) { []dyn.Location{{File: "file", Line: -2, Column: 2}}, ) - var unstyledMap = dyn.NewValue( + unstyledMap := dyn.NewValue( map[string]dyn.Value{ "key3": dyn.NewValue("value3", []dyn.Location{{File: "file", Line: 1, Column: 2}}), "key4": dyn.NewValue("value4", []dyn.Location{{File: "file", Line: 2, Column: 2}}), @@ -233,7 +233,7 @@ func TestCustomStylingWithNestedMap(t *testing.T) { []dyn.Location{{File: "file", Line: -1, 
Column: 2}}, ) - var val = dyn.NewValue( + val := dyn.NewValue( map[string]dyn.Value{ "styled": styledMap, "unstyled": unstyledMap, diff --git a/libs/env/loader.go b/libs/env/loader.go index f441ffa15..74c54cee8 100644 --- a/libs/env/loader.go +++ b/libs/env/loader.go @@ -43,7 +43,9 @@ func (le *configLoader) Configure(cfg *config.Config) error { if v == "" { continue } - a.Set(cfg, v) + if err := a.Set(cfg, v); err != nil { + return err + } } } return nil diff --git a/libs/exec/exec.go b/libs/exec/exec.go index 8e4633271..466117e60 100644 --- a/libs/exec/exec.go +++ b/libs/exec/exec.go @@ -10,9 +10,11 @@ import ( type ExecutableType string -const BashExecutable ExecutableType = `bash` -const ShExecutable ExecutableType = `sh` -const CmdExecutable ExecutableType = `cmd` +const ( + BashExecutable ExecutableType = `bash` + ShExecutable ExecutableType = `sh` + CmdExecutable ExecutableType = `cmd` +) var finders map[ExecutableType](func() (shell, error)) = map[ExecutableType](func() (shell, error)){ BashExecutable: newBashShell, diff --git a/libs/exec/exec_test.go b/libs/exec/exec_test.go index ad54601d0..e75c158bd 100644 --- a/libs/exec/exec_test.go +++ b/libs/exec/exec_test.go @@ -12,6 +12,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestExecutorWithSimpleInput(t *testing.T) { @@ -86,9 +87,11 @@ func testExecutorWithShell(t *testing.T, shell string) { tmpDir := t.TempDir() t.Setenv("PATH", tmpDir) if runtime.GOOS == "windows" { - os.Symlink(p, fmt.Sprintf("%s/%s.exe", tmpDir, shell)) + err = os.Symlink(p, fmt.Sprintf("%s/%s.exe", tmpDir, shell)) + require.NoError(t, err) } else { - os.Symlink(p, fmt.Sprintf("%s/%s", tmpDir, shell)) + err = os.Symlink(p, fmt.Sprintf("%s/%s", tmpDir, shell)) + require.NoError(t, err) } executor, err := NewCommandExecutor(".") diff --git a/libs/exec/shell.go b/libs/exec/shell.go index f5d176896..ee29eac8a 100644 --- a/libs/exec/shell.go +++ b/libs/exec/shell.go @@ -36,7 +36,7 @@ func findShell() (shell, error) { return nil, errors.New("no shell found") } -func createTempScript(command string, extension string) (string, error) { +func createTempScript(command, extension string) (string, error) { file, err := os.CreateTemp(os.TempDir(), "cli-exec*"+extension) if err != nil { return "", err diff --git a/libs/filer/filer.go b/libs/filer/filer.go index b5be4c3c2..83dc560cb 100644 --- a/libs/filer/filer.go +++ b/libs/filer/filer.go @@ -103,8 +103,7 @@ func (err DirectoryNotEmptyError) Is(other error) bool { return other == fs.ErrInvalid } -type CannotDeleteRootError struct { -} +type CannotDeleteRootError struct{} func (err CannotDeleteRootError) Error() string { return "unable to delete filer root" diff --git a/libs/filer/local_client.go b/libs/filer/local_client.go index 8b25345fc..385aa6924 100644 --- a/libs/filer/local_client.go +++ b/libs/filer/local_client.go @@ -29,7 +29,7 @@ func (w *LocalClient) Write(ctx context.Context, name string, reader io.Reader, } // Retrieve permission mask from the [WriteMode], if present. - perm := fs.FileMode(0644) + perm := fs.FileMode(0o644) for _, m := range mode { bits := m & writeModePerm if bits != 0 { @@ -47,7 +47,7 @@ func (w *LocalClient) Write(ctx context.Context, name string, reader io.Reader, f, err := os.OpenFile(absPath, flags, perm) if errors.Is(err, fs.ErrNotExist) && slices.Contains(mode, CreateParentDirectories) { // Create parent directories if they don't exist. 
- err = os.MkdirAll(filepath.Dir(absPath), 0755) + err = os.MkdirAll(filepath.Dir(absPath), 0o755) if err != nil { return err } @@ -73,7 +73,6 @@ func (w *LocalClient) Write(ctx context.Context, name string, reader io.Reader, } return err - } func (w *LocalClient) Read(ctx context.Context, name string) (io.ReadCloser, error) { @@ -159,7 +158,7 @@ func (w *LocalClient) Mkdir(ctx context.Context, name string) error { return err } - return os.MkdirAll(dirPath, 0755) + return os.MkdirAll(dirPath, 0o755) } func (w *LocalClient) Stat(ctx context.Context, name string) (fs.FileInfo, error) { diff --git a/libs/filer/slice_test.go b/libs/filer/slice_test.go index 21d783483..2bdb3f7f5 100644 --- a/libs/filer/slice_test.go +++ b/libs/filer/slice_test.go @@ -12,11 +12,10 @@ func TestSliceWithout(t *testing.T) { assert.Equal(t, []int{2, 3}, sliceWithout([]int{1, 2, 3}, 1)) assert.Equal(t, []int{1, 3}, sliceWithout([]int{1, 2, 3}, 2)) assert.Equal(t, []int{1, 2}, sliceWithout([]int{1, 2, 3}, 3)) - } func TestSliceWithoutReturnsClone(t *testing.T) { - var ints = []int{1, 2, 3} + ints := []int{1, 2, 3} assert.Equal(t, []int{2, 3}, sliceWithout(ints, 1)) assert.Equal(t, []int{1, 2, 3}, ints) } diff --git a/libs/filer/workspace_files_extensions_client.go b/libs/filer/workspace_files_extensions_client.go index 2a6052091..9ee2722e1 100644 --- a/libs/filer/workspace_files_extensions_client.go +++ b/libs/filer/workspace_files_extensions_client.go @@ -52,7 +52,8 @@ func (w *workspaceFilesExtensionsClient) getNotebookStatByNameWithExt(ctx contex notebook.ExtensionR, notebook.ExtensionScala, notebook.ExtensionSql, - notebook.ExtensionJupyter}, ext) { + notebook.ExtensionJupyter, + }, ext) { return nil, nil } diff --git a/libs/filer/workspace_files_extensions_client_test.go b/libs/filer/workspace_files_extensions_client_test.go index 10c176b31..10a2bebf0 100644 --- a/libs/filer/workspace_files_extensions_client_test.go +++ b/libs/filer/workspace_files_extensions_client_test.go @@ -17,8 +17,9 @@ type mockApiClient struct { } func (m *mockApiClient) Do(ctx context.Context, method, path string, - headers map[string]string, request any, response any, - visitors ...func(*http.Request) error) error { + headers map[string]string, request, response any, + visitors ...func(*http.Request) error, +) error { args := m.Called(ctx, method, path, headers, request, response, visitors) // Set the http response from a value provided in the mock call. 
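Note on the 0755 → 0o755 rewrites in the filer hunks above (and throughout this patch): the two literals denote the same integer, and the 0o prefix, valid since Go 1.13, is the spelling gofumpt enforces because it makes the octal base explicit. A quick illustrative check, not part of the patch itself:

    package main

    import (
    	"fmt"
    	"io/fs"
    )

    func main() {
    	// Identical values, different spellings of the same base-8 literals.
    	fmt.Println(0o755 == 0755, 0o644 == 0644) // true true

    	// The permission bits render identically either way.
    	fmt.Println(fs.FileMode(0o644)) // -rw-r--r--
    }
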
diff --git a/libs/flags/json_flag_test.go b/libs/flags/json_flag_test.go index 77530086a..b31324011 100644 --- a/libs/flags/json_flag_test.go +++ b/libs/flags/json_flag_test.go @@ -57,12 +57,13 @@ func TestJsonFlagFile(t *testing.T) { var request any var fpath string - var payload = []byte(`{"foo": "bar"}`) + payload := []byte(`{"foo": "bar"}`) { f, err := os.Create(path.Join(t.TempDir(), "file")) require.NoError(t, err) - f.Write(payload) + _, err = f.Write(payload) + require.NoError(t, err) f.Close() fpath = f.Name() } diff --git a/libs/flags/log_file_flag.go b/libs/flags/log_file_flag.go index 9e60353f0..d2fe51d91 100644 --- a/libs/flags/log_file_flag.go +++ b/libs/flags/log_file_flag.go @@ -48,7 +48,7 @@ func (f *realLogFile) Writer() io.Writer { } func (f *realLogFile) Open() error { - file, err := os.OpenFile(f.s, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0600) + file, err := os.OpenFile(f.s, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o600) if err != nil { return err } diff --git a/libs/folders/folders.go b/libs/folders/folders.go index c83c711d3..bbabc588c 100644 --- a/libs/folders/folders.go +++ b/libs/folders/folders.go @@ -8,7 +8,11 @@ import ( // FindDirWithLeaf returns the first directory that holds `leaf`, // traversing up to the root of the filesystem, starting at `dir`. -func FindDirWithLeaf(dir string, leaf string) (string, error) { +func FindDirWithLeaf(dir, leaf string) (string, error) { + dir, err := filepath.Abs(dir) + if err != nil { + return "", err + } for { _, err := os.Stat(filepath.Join(dir, leaf)) diff --git a/libs/git/config.go b/libs/git/config.go index fafd81bd6..f7ff057e1 100644 --- a/libs/git/config.go +++ b/libs/git/config.go @@ -155,8 +155,8 @@ func globalGitConfig() (*config, error) { // > are missing or unreadable they will be ignored. // // We therefore ignore the error return value for the calls below. 
- config.loadFile(vfs.MustNew(xdgConfigHome), "git/config") - config.loadFile(vfs.MustNew(config.home), ".gitconfig") + _ = config.loadFile(vfs.MustNew(xdgConfigHome), "git/config") + _ = config.loadFile(vfs.MustNew(config.home), ".gitconfig") return config, nil } diff --git a/libs/git/config_test.go b/libs/git/config_test.go index 3e6edf765..73f3431c9 100644 --- a/libs/git/config_test.go +++ b/libs/git/config_test.go @@ -113,7 +113,7 @@ func (h *testCoreExcludesHelper) initialize(t *testing.T) { t.Setenv("XDG_CONFIG_HOME", h.xdgConfigHome) xdgConfigHomeGit := filepath.Join(h.xdgConfigHome, "git") - err := os.MkdirAll(xdgConfigHomeGit, 0755) + err := os.MkdirAll(xdgConfigHomeGit, 0o755) require.NoError(t, err) } @@ -124,7 +124,7 @@ func (h *testCoreExcludesHelper) coreExcludesFile() (string, error) { } func (h *testCoreExcludesHelper) writeConfig(path, contents string) { - err := os.WriteFile(path, []byte(contents), 0644) + err := os.WriteFile(path, []byte(contents), 0o644) require.NoError(h, err) } diff --git a/libs/git/fileset_test.go b/libs/git/fileset_test.go index f4fd931fd..6d239edf5 100644 --- a/libs/git/fileset_test.go +++ b/libs/git/fileset_test.go @@ -56,7 +56,8 @@ func TestFileSetAddsCacheDirToGitIgnore(t *testing.T) { projectDir := t.TempDir() fileSet, err := NewFileSetAtRoot(vfs.MustNew(projectDir)) require.NoError(t, err) - fileSet.EnsureValidGitIgnoreExists() + err = fileSet.EnsureValidGitIgnoreExists() + require.NoError(t, err) gitIgnorePath := filepath.Join(projectDir, ".gitignore") assert.FileExists(t, gitIgnorePath) @@ -74,7 +75,8 @@ func TestFileSetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) { err = os.WriteFile(gitIgnorePath, []byte(".databricks"), 0o644) require.NoError(t, err) - fileSet.EnsureValidGitIgnoreExists() + err = fileSet.EnsureValidGitIgnoreExists() + require.NoError(t, err) b, err := os.ReadFile(gitIgnorePath) require.NoError(t, err) diff --git a/libs/git/ignore_test.go b/libs/git/ignore_test.go index 057c0cb2e..9e2713608 100644 --- a/libs/git/ignore_test.go +++ b/libs/git/ignore_test.go @@ -48,7 +48,7 @@ func TestIgnoreFileTaint(t *testing.T) { assert.False(t, ign) // Now create the .gitignore file. - err = os.WriteFile(gitIgnorePath, []byte("hello"), 0644) + err = os.WriteFile(gitIgnorePath, []byte("hello"), 0o644) require.NoError(t, err) // Verify that the match still doesn't happen (no spontaneous reload). 
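Where the PR keeps an intentionally ignored error (as the git-config comment above spells out), it now assigns the result to the blank identifier. errcheck accepts an explicit `_ =` discard, so the suppression becomes visible at the call site instead of silent. A small standalone illustration:

```go
package main

import (
	"fmt"
	"os"
)

func main() {
	// errcheck flags a bare `os.Remove(path)` whose error return is
	// silently dropped. An explicit discard documents the decision:
	// a missing file is fine here, so the error is deliberately ignored.
	_ = os.Remove("/tmp/might-not-exist")

	fmt.Println("cleanup attempted")
}
```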
diff --git a/libs/git/info.go b/libs/git/info.go index 13c298113..46e57be48 100644 --- a/libs/git/info.go +++ b/libs/git/info.go @@ -2,15 +2,12 @@ package git import ( "context" - "errors" - "io/fs" "net/http" - "os" "path" - "path/filepath" "strings" "github.com/databricks/cli/libs/dbr" + "github.com/databricks/cli/libs/folders" "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/vfs" "github.com/databricks/databricks-sdk-go" @@ -75,7 +72,6 @@ func fetchRepositoryInfoAPI(ctx context.Context, path string, w *databricks.Work }, &response, ) - if err != nil { return result, err } @@ -105,7 +101,7 @@ func ensureWorkspacePrefix(p string) string { func fetchRepositoryInfoDotGit(ctx context.Context, path string) (RepositoryInfo, error) { result := RepositoryInfo{} - rootDir, err := findLeafInTree(path, GitDirectoryName) + rootDir, err := folders.FindDirWithLeaf(path, GitDirectoryName) if rootDir == "" { return result, err } @@ -134,28 +130,3 @@ func fetchRepositoryInfoDotGit(ctx context.Context, path string) (RepositoryInfo return result, nil } - -func findLeafInTree(p string, leafName string) (string, error) { - var err error - for i := 0; i < 10000; i++ { - _, err = os.Stat(filepath.Join(p, leafName)) - - if err == nil { - // Found [leafName] in p - return p, nil - } - - // ErrNotExist means we continue traversal up the tree. - if errors.Is(err, fs.ErrNotExist) { - parent := filepath.Dir(p) - if parent == p { - return "", nil - } - p = parent - continue - } - break - } - - return "", err -} diff --git a/libs/git/reference.go b/libs/git/reference.go index 2165a9cda..e1126d4f2 100644 --- a/libs/git/reference.go +++ b/libs/git/reference.go @@ -12,8 +12,10 @@ import ( type ReferenceType string -var ErrNotAReferencePointer = fmt.Errorf("HEAD does not point to another reference") -var ErrNotABranch = fmt.Errorf("HEAD is not a reference to a git branch") +var ( + ErrNotAReferencePointer = fmt.Errorf("HEAD does not point to another reference") + ErrNotABranch = fmt.Errorf("HEAD is not a reference to a git branch") +) const ( // pointer to a secondary reference file path containing sha-1 object ID. 
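Adjacent top-level declarations of the same kind are merged into one factored block throughout this PR, as in `reference.go` above. The grouped form, shown here with `errors.New` purely for illustration (the actual file keeps its `fmt.Errorf` calls):

```go
package git

import "errors"

// One parenthesized block replaces two stand-alone `var` lines.
var (
	ErrNotAReferencePointer = errors.New("HEAD does not point to another reference")
	ErrNotABranch           = errors.New("HEAD is not a reference to a git branch")
)

// The same grouping applies to constants.
const (
	ReferencePrefix = "ref: "
	HeadPathPrefix  = "refs/heads/"
)
```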
@@ -30,8 +32,10 @@ type Reference struct { Content string } -const ReferencePrefix = "ref: " -const HeadPathPrefix = "refs/heads/" +const ( + ReferencePrefix = "ref: " + HeadPathPrefix = "refs/heads/" +) // asserts if a string is a 40 character hexadecimal encoded string func isSHA1(s string) bool { diff --git a/libs/git/reference_test.go b/libs/git/reference_test.go index 194d79333..bfa0e50e5 100644 --- a/libs/git/reference_test.go +++ b/libs/git/reference_test.go @@ -54,7 +54,8 @@ func TestReferenceLoadingForObjectID(t *testing.T) { f, err := os.Create(filepath.Join(tmp, "HEAD")) require.NoError(t, err) defer f.Close() - f.WriteString(strings.Repeat("e", 40) + "\r\n") + _, err = f.WriteString(strings.Repeat("e", 40) + "\r\n") + require.NoError(t, err) ref, err := LoadReferenceFile(vfs.MustNew(tmp), "HEAD") assert.NoError(t, err) @@ -67,7 +68,8 @@ func TestReferenceLoadingForReference(t *testing.T) { f, err := os.OpenFile(filepath.Join(tmp, "HEAD"), os.O_CREATE|os.O_WRONLY, os.ModePerm) require.NoError(t, err) defer f.Close() - f.WriteString("ref: refs/heads/foo\n") + _, err = f.WriteString("ref: refs/heads/foo\n") + require.NoError(t, err) ref, err := LoadReferenceFile(vfs.MustNew(tmp), "HEAD") assert.NoError(t, err) @@ -80,7 +82,8 @@ func TestReferenceLoadingFailsForInvalidContent(t *testing.T) { f, err := os.OpenFile(filepath.Join(tmp, "HEAD"), os.O_CREATE|os.O_WRONLY, os.ModePerm) require.NoError(t, err) defer f.Close() - f.WriteString("abc") + _, err = f.WriteString("abc") + require.NoError(t, err) _, err = LoadReferenceFile(vfs.MustNew(tmp), "HEAD") assert.ErrorContains(t, err, "unknown format for git HEAD") diff --git a/libs/git/repository_test.go b/libs/git/repository_test.go index 93d9a03dc..857df65a9 100644 --- a/libs/git/repository_test.go +++ b/libs/git/repository_test.go @@ -27,7 +27,7 @@ func newTestRepository(t *testing.T) *testRepository { require.NoError(t, err) defer f1.Close() - f1.WriteString( + _, err = f1.WriteString( `[core] repositoryformatversion = 0 filemode = true @@ -36,6 +36,7 @@ func newTestRepository(t *testing.T) *testRepository { ignorecase = true precomposeunicode = true `) + require.NoError(t, err) f2, err := os.Create(filepath.Join(tmp, ".git", "HEAD")) require.NoError(t, err) @@ -62,7 +63,7 @@ func (testRepo *testRepository) checkoutCommit(commitId string) { require.NoError(testRepo.t, err) } -func (testRepo *testRepository) addBranch(name string, latestCommit string) { +func (testRepo *testRepository) addBranch(name, latestCommit string) { // create dir for branch head reference branchDir := filepath.Join(testRepo.r.Root(), ".git", "refs", "heads") err := os.MkdirAll(branchDir, os.ModePerm) diff --git a/libs/git/view.go b/libs/git/view.go index 2eaba1f8b..db22dfc5d 100644 --- a/libs/git/view.go +++ b/libs/git/view.go @@ -113,7 +113,7 @@ func (v *View) EnsureValidGitIgnoreExists() error { // Create .gitignore with .databricks entry gitIgnorePath := filepath.Join(v.repo.Root(), v.targetPath, ".gitignore") - file, err := os.OpenFile(gitIgnorePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644) + file, err := os.OpenFile(gitIgnorePath, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0o644) if err != nil { return err } diff --git a/libs/git/view_test.go b/libs/git/view_test.go index 06f6f9419..96881fdee 100644 --- a/libs/git/view_test.go +++ b/libs/git/view_test.go @@ -20,7 +20,7 @@ func copyTestdata(t *testing.T, name string) string { require.NoError(t, err) if d.IsDir() { - err := os.MkdirAll(filepath.Join(tempDir, path), 0755) + err := 
os.MkdirAll(filepath.Join(tempDir, path), 0o755) require.NoError(t, err) return nil } @@ -46,7 +46,7 @@ func createFakeRepo(t *testing.T, testdataName string) string { absPath := copyTestdata(t, testdataName) // Add .git directory to make it look like a Git repository. - err := os.Mkdir(filepath.Join(absPath, ".git"), 0755) + err := os.Mkdir(filepath.Join(absPath, ".git"), 0o755) require.NoError(t, err) return absPath } diff --git a/libs/jsonschema/from_type_test.go b/libs/jsonschema/from_type_test.go index 0ddb1011a..cdfdcfd10 100644 --- a/libs/jsonschema/from_type_test.go +++ b/libs/jsonschema/from_type_test.go @@ -403,7 +403,8 @@ func TestFromTypeError(t *testing.T) { // Maps with non-string keys should panic. type mapOfInts map[int]int assert.PanicsWithValue(t, "found map with non-string key: int", func() { - FromType(reflect.TypeOf(mapOfInts{}), nil) + _, err := FromType(reflect.TypeOf(mapOfInts{}), nil) + require.NoError(t, err) }) // Unsupported types should return an error. diff --git a/libs/jsonschema/validate_type.go b/libs/jsonschema/validate_type.go index 125d6b20b..9f70498ba 100644 --- a/libs/jsonschema/validate_type.go +++ b/libs/jsonschema/validate_type.go @@ -39,9 +39,11 @@ func validateNumber(v any) error { } func validateInteger(v any) error { - if !slices.Contains([]reflect.Kind{reflect.Int, reflect.Int8, reflect.Int16, + if !slices.Contains([]reflect.Kind{ + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64}, + reflect.Uint32, reflect.Uint64, + }, reflect.TypeOf(v).Kind()) { return fmt.Errorf("expected type integer, but value is %#v", v) } diff --git a/libs/locker/locker.go b/libs/locker/locker.go index b0d65c42e..eb59c9f74 100644 --- a/libs/locker/locker.go +++ b/libs/locker/locker.go @@ -140,7 +140,7 @@ func (locker *Locker) Lock(ctx context.Context, isForced bool) error { return err } - var modes = []filer.WriteMode{ + modes := []filer.WriteMode{ // Always create parent directory if it doesn't yet exist. filer.CreateParentDirectories, } @@ -196,7 +196,7 @@ func (locker *Locker) Unlock(ctx context.Context, opts ...UnlockOption) error { return nil } -func CreateLocker(user string, targetDir string, w *databricks.WorkspaceClient) (*Locker, error) { +func CreateLocker(user, targetDir string, w *databricks.WorkspaceClient) (*Locker, error) { filer, err := filer.NewWorkspaceFilesClient(w, targetDir) if err != nil { return nil, err diff --git a/libs/log/context.go b/libs/log/context.go index d9e31d116..5e3e8ccb6 100644 --- a/libs/log/context.go +++ b/libs/log/context.go @@ -2,7 +2,6 @@ package log import ( "context" - "log/slog" ) diff --git a/libs/log/logger.go b/libs/log/logger.go index 43a30e92b..c1d307c89 100644 --- a/libs/log/logger.go +++ b/libs/log/logger.go @@ -3,10 +3,9 @@ package log import ( "context" "fmt" + "log/slog" "runtime" "time" - - "log/slog" ) // GetLogger returns either the logger configured on the context, @@ -31,6 +30,51 @@ func log(logger *slog.Logger, ctx context.Context, level slog.Level, msg string) _ = logger.Handler().Handle(ctx, r) } +// Trace logs a string using the context-local or global logger. +func Trace(ctx context.Context, msg string) { + logger := GetLogger(ctx) + if !logger.Enabled(ctx, LevelTrace) { + return + } + log(logger, ctx, LevelTrace, msg) +} + +// Debug logs a string using the context-local or global logger. 
+func Debug(ctx context.Context, msg string) { + logger := GetLogger(ctx) + if !logger.Enabled(ctx, LevelDebug) { + return + } + log(logger, ctx, LevelDebug, msg) +} + +// Info logs a string using the context-local or global logger. +func Info(ctx context.Context, msg string) { + logger := GetLogger(ctx) + if !logger.Enabled(ctx, LevelInfo) { + return + } + log(logger, ctx, LevelInfo, msg) +} + +// Warn logs a string using the context-local or global logger. +func Warn(ctx context.Context, msg string) { + logger := GetLogger(ctx) + if !logger.Enabled(ctx, LevelWarn) { + return + } + log(logger, ctx, LevelWarn, msg) +} + +// Error logs a string using the context-local or global logger. +func Error(ctx context.Context, msg string) { + logger := GetLogger(ctx) + if !logger.Enabled(ctx, LevelError) { + return + } + log(logger, ctx, LevelError, msg) +} + // Tracef logs a formatted string using the context-local or global logger. func Tracef(ctx context.Context, format string, v ...any) { logger := GetLogger(ctx) diff --git a/libs/log/sdk.go b/libs/log/sdk.go index e1b1ffed4..086f80f50 100644 --- a/libs/log/sdk.go +++ b/libs/log/sdk.go @@ -3,11 +3,10 @@ package log import ( "context" "fmt" + "log/slog" "runtime" "time" - "log/slog" - sdk "github.com/databricks/databricks-sdk-go/logger" ) diff --git a/libs/notebook/detect.go b/libs/notebook/detect.go index cd8680bfa..40c850945 100644 --- a/libs/notebook/detect.go +++ b/libs/notebook/detect.go @@ -46,7 +46,7 @@ func (f file) close() error { func (f file) readHeader() (string, error) { // Scan header line with some padding. - var buf = make([]byte, headerLength) + buf := make([]byte, headerLength) n, err := f.f.Read([]byte(buf)) if err != nil && err != io.EOF { return "", err diff --git a/libs/notebook/detect_jupyter_test.go b/libs/notebook/detect_jupyter_test.go index 4ff2aeff6..af29a2214 100644 --- a/libs/notebook/detect_jupyter_test.go +++ b/libs/notebook/detect_jupyter_test.go @@ -41,7 +41,7 @@ func TestDetectJupyterInvalidJSON(t *testing.T) { dir := t.TempDir() path := filepath.Join(dir, "file.ipynb") buf := make([]byte, 128) - err := os.WriteFile(path, buf, 0644) + err := os.WriteFile(path, buf, 0o644) require.NoError(t, err) // Garbage contents means not a notebook. @@ -55,7 +55,7 @@ func TestDetectJupyterNoCells(t *testing.T) { dir := t.TempDir() path := filepath.Join(dir, "file.ipynb") buf := []byte("{}") - err := os.WriteFile(path, buf, 0644) + err := os.WriteFile(path, buf, 0o644) require.NoError(t, err) // Garbage contents means not a notebook. @@ -69,7 +69,7 @@ func TestDetectJupyterOldVersion(t *testing.T) { dir := t.TempDir() path := filepath.Join(dir, "file.ipynb") buf := []byte(`{ "cells": [], "metadata": {}, "nbformat": 3 }`) - err := os.WriteFile(path, buf, 0644) + err := os.WriteFile(path, buf, 0o644) require.NoError(t, err) // Garbage contents means not a notebook. diff --git a/libs/notebook/detect_test.go b/libs/notebook/detect_test.go index 786c7e394..4ede7bf9b 100644 --- a/libs/notebook/detect_test.go +++ b/libs/notebook/detect_test.go @@ -78,7 +78,7 @@ func TestDetectEmptyFile(t *testing.T) { // Create empty file. dir := t.TempDir() path := filepath.Join(dir, "file.py") - err := os.WriteFile(path, nil, 0644) + err := os.WriteFile(path, nil, 0o644) require.NoError(t, err) // No contents means not a notebook. 
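The new non-format helpers (`Trace` through `Error`) mirror the existing `Tracef`-style functions but take a plain string, so no format string is scanned; each one gates on `logger.Enabled` so a disabled level costs only that check. A usage sketch against the package as shown above (the sync-loop names are invented):

```go
package main

import (
	"context"
	"fmt"

	"github.com/databricks/cli/libs/log"
)

func syncOnce(ctx context.Context, nfiles int) {
	// Static message: the new non-format variant avoids building a
	// format string that Sprintf would otherwise have to process.
	log.Debug(ctx, "starting sync iteration")

	// Dynamic message: the pre-existing format variant still applies.
	log.Infof(ctx, "uploaded %d files", nfiles)
}

func main() {
	syncOnce(context.Background(), 3)
	fmt.Println("done")
}
```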
@@ -92,7 +92,7 @@ func TestDetectFileWithLongHeader(t *testing.T) { dir := t.TempDir() path := filepath.Join(dir, "file.py") buf := make([]byte, 128*1024) - err := os.WriteFile(path, buf, 0644) + err := os.WriteFile(path, buf, 0o644) require.NoError(t, err) // Garbage contents means not a notebook. diff --git a/libs/process/stub.go b/libs/process/stub.go index 8472f65d5..8ab6fd705 100644 --- a/libs/process/stub.go +++ b/libs/process/stub.go @@ -148,13 +148,20 @@ func (s *processStub) run(cmd *exec.Cmd) error { if !re.MatchString(norm) { continue } + err := resp.err if resp.stdout != "" { - cmd.Stdout.Write([]byte(resp.stdout)) + _, err1 := cmd.Stdout.Write([]byte(resp.stdout)) + if err == nil { + err = err1 + } } if resp.stderr != "" { - cmd.Stderr.Write([]byte(resp.stderr)) + _, err1 := cmd.Stderr.Write([]byte(resp.stderr)) + if err == nil { + err = err1 + } } - return resp.err + return err } if s.callback != nil { return s.callback(cmd) @@ -163,8 +170,12 @@ func (s *processStub) run(cmd *exec.Cmd) error { if s.reponseStub == zeroStub { return fmt.Errorf("no default process stub") } + err := s.reponseStub.err if s.reponseStub.stdout != "" { - cmd.Stdout.Write([]byte(s.reponseStub.stdout)) + _, err1 := cmd.Stdout.Write([]byte(s.reponseStub.stdout)) + if err == nil { + err = err1 + } } - return s.reponseStub.err + return err } diff --git a/libs/process/stub_test.go b/libs/process/stub_test.go index 65f59f817..81afa3a89 100644 --- a/libs/process/stub_test.go +++ b/libs/process/stub_test.go @@ -43,8 +43,14 @@ func TestStubCallback(t *testing.T) { ctx := context.Background() ctx, stub := process.WithStub(ctx) stub.WithCallback(func(cmd *exec.Cmd) error { - cmd.Stderr.Write([]byte("something...")) - cmd.Stdout.Write([]byte("else...")) + _, err := cmd.Stderr.Write([]byte("something...")) + if err != nil { + return err + } + _, err = cmd.Stdout.Write([]byte("else...")) + if err != nil { + return err + } return fmt.Errorf("yep") }) diff --git a/libs/python/detect_test.go b/libs/python/detect_test.go index 78c7067f7..485aa1875 100644 --- a/libs/python/detect_test.go +++ b/libs/python/detect_test.go @@ -14,13 +14,13 @@ func TestDetectVEnvExecutable(t *testing.T) { dir := t.TempDir() interpreterPath := interpreterPath(dir) - err := os.Mkdir(filepath.Dir(interpreterPath), 0755) + err := os.Mkdir(filepath.Dir(interpreterPath), 0o755) require.NoError(t, err) - err = os.WriteFile(interpreterPath, []byte(""), 0755) + err = os.WriteFile(interpreterPath, []byte(""), 0o755) require.NoError(t, err) - err = os.WriteFile(filepath.Join(dir, "pyvenv.cfg"), []byte(""), 0755) + err = os.WriteFile(filepath.Join(dir, "pyvenv.cfg"), []byte(""), 0o755) require.NoError(t, err) executable, err := DetectVEnvExecutable(dir) diff --git a/libs/python/interpreters.go b/libs/python/interpreters.go index 94f5074de..6071309a8 100644 --- a/libs/python/interpreters.go +++ b/libs/python/interpreters.go @@ -18,8 +18,10 @@ import ( var ErrNoPythonInterpreters = errors.New("no python3 interpreters found") -const officialMswinPython = "(Python Official) https://python.org/downloads/windows" -const microsoftStorePython = "(Microsoft Store) https://apps.microsoft.com/store/search?publisher=Python%20Software%20Foundation" +const ( + officialMswinPython = "(Python Official) https://python.org/downloads/windows" + microsoftStorePython = "(Microsoft Store) https://apps.microsoft.com/store/search?publisher=Python%20Software%20Foundation" +) const worldWriteable = 0o002 diff --git a/libs/python/interpreters_unix_test.go 
b/libs/python/interpreters_unix_test.go index e2b0a5a1c..8471644a1 100644 --- a/libs/python/interpreters_unix_test.go +++ b/libs/python/interpreters_unix_test.go @@ -34,13 +34,14 @@ func TestFilteringInterpreters(t *testing.T) { rogueBin := filepath.Join(t.TempDir(), "rogue-bin") err := os.Mkdir(rogueBin, 0o777) assert.NoError(t, err) - os.Chmod(rogueBin, 0o777) + err = os.Chmod(rogueBin, 0o777) + assert.NoError(t, err) raw, err := os.ReadFile("testdata/world-writeable/python8.4") assert.NoError(t, err) injectedBinary := filepath.Join(rogueBin, "python8.4") - err = os.WriteFile(injectedBinary, raw, 00777) + err = os.WriteFile(injectedBinary, raw, 0o0777) assert.NoError(t, err) t.Setenv("PATH", "testdata/other-binaries-filtered:"+rogueBin) diff --git a/libs/sync/diff.go b/libs/sync/diff.go index e91f7277e..d81a3ae65 100644 --- a/libs/sync/diff.go +++ b/libs/sync/diff.go @@ -20,7 +20,7 @@ func (d diff) IsEmpty() bool { // Compute operations required to make files in WSFS reflect current local files. // Takes into account changes since the last sync iteration. -func computeDiff(after *SnapshotState, before *SnapshotState) diff { +func computeDiff(after, before *SnapshotState) diff { d := &diff{ delete: make([]string, 0), rmdir: make([]string, 0), @@ -35,7 +35,7 @@ func computeDiff(after *SnapshotState, before *SnapshotState) diff { } // Add operators for tracked files that no longer exist. -func (d *diff) addRemovedFiles(after *SnapshotState, before *SnapshotState) { +func (d *diff) addRemovedFiles(after, before *SnapshotState) { for localName, remoteName := range before.LocalToRemoteNames { if _, ok := after.LocalToRemoteNames[localName]; !ok { d.delete = append(d.delete, remoteName) @@ -50,7 +50,7 @@ func (d *diff) addRemovedFiles(after *SnapshotState, before *SnapshotState) { // Cleanup previous remote files for files that had their remote targets change. For // example this is possible if you convert a normal python script to a notebook. -func (d *diff) addFilesWithRemoteNameChanged(after *SnapshotState, before *SnapshotState) { +func (d *diff) addFilesWithRemoteNameChanged(after, before *SnapshotState) { for localName, beforeRemoteName := range before.LocalToRemoteNames { afterRemoteName, ok := after.LocalToRemoteNames[localName] if ok && afterRemoteName != beforeRemoteName { @@ -60,7 +60,7 @@ func (d *diff) addFilesWithRemoteNameChanged(after *SnapshotState, before *Snaps } // Add operators for files that were not being tracked before. -func (d *diff) addNewFiles(after *SnapshotState, before *SnapshotState) { +func (d *diff) addNewFiles(after, before *SnapshotState) { for localName := range after.LastModifiedTimes { if _, ok := before.LastModifiedTimes[localName]; !ok { d.put = append(d.put, localName) @@ -74,7 +74,7 @@ func (d *diff) addNewFiles(after *SnapshotState, before *SnapshotState) { } // Add operators for files which had their contents updated. 
-func (d *diff) addUpdatedFiles(after *SnapshotState, before *SnapshotState) { +func (d *diff) addUpdatedFiles(after, before *SnapshotState) { for localName, modTime := range after.LastModifiedTimes { prevModTime, ok := before.LastModifiedTimes[localName] if ok && modTime.After(prevModTime) { diff --git a/libs/sync/event.go b/libs/sync/event.go index 8e5c0efa2..05821a477 100644 --- a/libs/sync/event.go +++ b/libs/sync/event.go @@ -73,7 +73,7 @@ func (e *EventStart) String() string { return fmt.Sprintf("Action: %s", e.EventChanges.String()) } -func newEventStart(seq int, put []string, delete []string) Event { +func newEventStart(seq int, put, delete []string) Event { return &EventStart{ EventBase: newEventBase(seq, EventTypeStart), EventChanges: &EventChanges{Put: put, Delete: delete}, @@ -133,7 +133,7 @@ func (e *EventSyncComplete) String() string { return "Complete" } -func newEventComplete(seq int, put []string, delete []string) Event { +func newEventComplete(seq int, put, delete []string) Event { return &EventSyncComplete{ EventBase: newEventBase(seq, EventTypeComplete), EventChanges: &EventChanges{Put: put, Delete: delete}, diff --git a/libs/sync/output.go b/libs/sync/output.go index c01b25ef6..e6ac8c56c 100644 --- a/libs/sync/output.go +++ b/libs/sync/output.go @@ -43,9 +43,9 @@ func TextOutput(ctx context.Context, ch <-chan Event, w io.Writer) { // Log only if something actually happened. // Sync events produce an empty string if nothing happened. if str := e.String(); str != "" { - bw.WriteString(str) - bw.WriteString("\n") - bw.Flush() + _, _ = bw.WriteString(str) + _, _ = bw.WriteString("\n") + _ = bw.Flush() } } } diff --git a/libs/sync/snapshot.go b/libs/sync/snapshot.go index f2920d8c2..a596531b9 100644 --- a/libs/sync/snapshot.go +++ b/libs/sync/snapshot.go @@ -2,6 +2,8 @@ package sync import ( "context" + "crypto/md5" + "encoding/hex" "encoding/json" "errors" "fmt" @@ -10,9 +12,6 @@ import ( "path/filepath" "time" - "crypto/md5" - "encoding/hex" - "github.com/databricks/cli/libs/fileset" "github.com/databricks/cli/libs/log" ) @@ -91,7 +90,7 @@ func GetFileName(host, remotePath string) string { func SnapshotPath(opts *SyncOptions) (string, error) { snapshotDir := filepath.Join(opts.SnapshotBasePath, syncSnapshotDirName) if _, err := os.Stat(snapshotDir); errors.Is(err, fs.ErrNotExist) { - err = os.MkdirAll(snapshotDir, 0755) + err = os.MkdirAll(snapshotDir, 0o755) if err != nil { return "", fmt.Errorf("failed to create config directory: %s", err) } @@ -122,7 +121,7 @@ func newSnapshot(ctx context.Context, opts *SyncOptions) (*Snapshot, error) { } func (s *Snapshot) Save(ctx context.Context) error { - f, err := os.OpenFile(s.snapshotPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0644) + f, err := os.OpenFile(s.snapshotPath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, 0o644) if err != nil { return fmt.Errorf("failed to create/open persisted sync snapshot file: %s", err) } diff --git a/libs/sync/snapshot_state.go b/libs/sync/snapshot_state.go index 09bb5b63e..d8660ee6a 100644 --- a/libs/sync/snapshot_state.go +++ b/libs/sync/snapshot_state.go @@ -51,7 +51,6 @@ func NewSnapshotState(localFiles []fileset.File) (*SnapshotState, error) { // Compute the remote name the file will have in WSFS remoteName := f.Relative isNotebook, err := f.IsNotebook() - if err != nil { // Ignore this file if we're unable to determine the notebook type. // Trying to upload such a file to the workspace would fail anyway. 
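The `libs/process/stub.go` hunks above follow a first-error-wins pattern: the configured stub error keeps priority, and a write failure on stdout or stderr is only surfaced when no stub error was set. A distilled sketch of that pattern, with hypothetical names:

```go
package main

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// writeBoth mimics the stub: report the configured error if any,
// otherwise the first write error encountered.
func writeBoth(stdout, stderr io.Writer, outMsg, errMsg string, configured error) error {
	err := configured
	if _, err1 := stdout.Write([]byte(outMsg)); err == nil {
		err = err1
	}
	if _, err1 := stderr.Write([]byte(errMsg)); err == nil {
		err = err1
	}
	return err
}

func main() {
	var out, errBuf strings.Builder
	// No configured error: a write error, if one occurred, would win.
	fmt.Println(writeBoth(&out, &errBuf, "else...", "something...", nil))
	// A configured error always takes precedence over write errors.
	fmt.Println(writeBoth(&out, &errBuf, "x", "y", errors.New("yep")))
}
```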
diff --git a/libs/sync/sync.go b/libs/sync/sync.go index 6bd26f224..dc2c8992a 100644 --- a/libs/sync/sync.go +++ b/libs/sync/sync.go @@ -117,7 +117,7 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { } var notifier EventNotifier - var outputWaitGroup = &stdsync.WaitGroup{} + outputWaitGroup := &stdsync.WaitGroup{} if opts.OutputHandler != nil { ch := make(chan Event, MaxRequestsInFlight) notifier = &ChannelNotifier{ch} diff --git a/libs/tags/gcp_test.go b/libs/tags/gcp_test.go index 89f4fd8e6..7c960acbb 100644 --- a/libs/tags/gcp_test.go +++ b/libs/tags/gcp_test.go @@ -38,7 +38,6 @@ func TestGcpNormalizeKey(t *testing.T) { assert.Equal(t, "test", gcpTag.NormalizeKey("test")) assert.Equal(t, "cafe", gcpTag.NormalizeKey("café 🍎?")) assert.Equal(t, "cafe_foo", gcpTag.NormalizeKey("__café_foo__")) - } func TestGcpNormalizeValue(t *testing.T) { diff --git a/libs/template/file_test.go b/libs/template/file_test.go index bd5f6d632..ced38c284 100644 --- a/libs/template/file_test.go +++ b/libs/template/file_test.go @@ -57,7 +57,7 @@ func TestTemplateInMemoryFilePersistToDisk(t *testing.T) { t.SkipNow() } ctx := context.Background() - testInMemoryFile(t, ctx, 0755) + testInMemoryFile(t, ctx, 0o755) } func TestTemplateInMemoryFilePersistToDiskForWindows(t *testing.T) { @@ -67,7 +67,7 @@ func TestTemplateInMemoryFilePersistToDiskForWindows(t *testing.T) { // we have separate tests for windows because of differences in valid // fs.FileMode values we can use for different operating systems. ctx := context.Background() - testInMemoryFile(t, ctx, 0666) + testInMemoryFile(t, ctx, 0o666) } func TestTemplateCopyFilePersistToDisk(t *testing.T) { @@ -75,7 +75,7 @@ func TestTemplateCopyFilePersistToDisk(t *testing.T) { t.SkipNow() } ctx := context.Background() - testCopyFile(t, ctx, 0644) + testCopyFile(t, ctx, 0o644) } func TestTemplateCopyFilePersistToDiskForWindows(t *testing.T) { @@ -85,5 +85,5 @@ func TestTemplateCopyFilePersistToDiskForWindows(t *testing.T) { // we have separate tests for windows because of differences in valid // fs.FileMode values we can use for different operating systems. ctx := context.Background() - testCopyFile(t, ctx, 0666) + testCopyFile(t, ctx, 0o666) } diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 7f7acbd24..4550e5fa2 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -31,9 +31,11 @@ type pair struct { v any } -var cachedUser *iam.User -var cachedIsServicePrincipal *bool -var cachedCatalog *string +var ( + cachedUser *iam.User + cachedIsServicePrincipal *bool + cachedCatalog *string +) // UUID that is stable for the duration of the template execution. This can be used // to populate the `bundle.uuid` field in databricks.yml by template authors. 
diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index 6c476c658..d98f40b24 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -158,12 +158,11 @@ func TestWorkspaceHost(t *testing.T) { assert.Len(t, r.files, 1) assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "https://myhost.com") assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "i3.xlarge") - } func TestWorkspaceHostNotConfigured(t *testing.T) { ctx := context.Background() - cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "", "template") + cmd := cmdio.NewIO(ctx, flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "", "template") ctx = cmdio.InContext(ctx, cmd) w := &databricks.WorkspaceClient{ @@ -178,5 +177,4 @@ func TestWorkspaceHostNotConfigured(t *testing.T) { err = r.walk() require.ErrorContains(t, err, "cannot determine target workspace") - } diff --git a/libs/template/materialize.go b/libs/template/materialize.go index ee30444a5..86a6a8c37 100644 --- a/libs/template/materialize.go +++ b/libs/template/materialize.go @@ -10,9 +10,11 @@ import ( "github.com/databricks/cli/libs/filer" ) -const libraryDirName = "library" -const templateDirName = "template" -const schemaFileName = "databricks_template_schema.json" +const ( + libraryDirName = "library" + templateDirName = "template" + schemaFileName = "databricks_template_schema.json" +) // This function materializes the input templates as a project, using user defined // configurations. diff --git a/libs/template/renderer.go b/libs/template/renderer.go index 0f30a67d0..5030cd9df 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -310,7 +310,7 @@ func (r *renderer) persistToDisk(ctx context.Context, out filer.Filer) error { if err == nil { return fmt.Errorf("failed to initialize template, one or more files already exist: %s", path) } - if err != nil && !errors.Is(err, fs.ErrNotExist) { + if !errors.Is(err, fs.ErrNotExist) { return fmt.Errorf("error while verifying file %s does not already exist: %w", path, err) } } diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index a4b9166da..eeb308732 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -27,7 +27,7 @@ import ( "github.com/stretchr/testify/require" ) -func assertFileContent(t *testing.T, path string, content string) { +func assertFileContent(t *testing.T, path, content string) { b, err := os.ReadFile(path) require.NoError(t, err) assert.Equal(t, content, string(b)) @@ -39,7 +39,7 @@ func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) { assert.Equal(t, perm, info.Mode().Perm()) } -func assertBuiltinTemplateValid(t *testing.T, template string, settings map[string]any, target string, isServicePrincipal bool, build bool, tempDir string) { +func assertBuiltinTemplateValid(t *testing.T, template string, settings map[string]any, target string, isServicePrincipal, build bool, tempDir string) { ctx := context.Background() templateFS, err := fs.Sub(builtinTemplates, path.Join("templates", template)) @@ -200,8 +200,7 @@ func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) { } func TestRendererExecuteTemplate(t *testing.T) { - templateText := - `"{{.count}} items are made of {{.Material}}". + templateText := `"{{.count}} items are made of {{.Material}}". {{if eq .Animal "sheep" }} Sheep wool is the best! 
{{else}} @@ -256,7 +255,6 @@ func TestRendererExecuteTemplateWithUnknownProperty(t *testing.T) { } func TestRendererIsSkipped(t *testing.T) { - skipPatterns := []string{"a*", "*yz", "def", "a/b/*"} // skipped paths @@ -319,22 +317,22 @@ func TestRendererPersistToDisk(t *testing.T) { skipPatterns: []string{"a/b/c", "mn*"}, files: []file{ &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "a/b/c", content: nil, }, &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "mno", content: nil, }, &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "a/b/d", content: []byte("123"), }, &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "mmnn", content: []byte("456"), }, @@ -350,9 +348,9 @@ func TestRendererPersistToDisk(t *testing.T) { assert.NoFileExists(t, filepath.Join(tmpDir, "mno")) assertFileContent(t, filepath.Join(tmpDir, "a", "b", "d"), "123") - assertFilePermissions(t, filepath.Join(tmpDir, "a", "b", "d"), 0444) + assertFilePermissions(t, filepath.Join(tmpDir, "a", "b", "d"), 0o444) assertFileContent(t, filepath.Join(tmpDir, "mmnn"), "456") - assertFilePermissions(t, filepath.Join(tmpDir, "mmnn"), 0444) + assertFilePermissions(t, filepath.Join(tmpDir, "mmnn"), 0o444) } func TestRendererWalk(t *testing.T) { @@ -520,8 +518,8 @@ func TestRendererReadsPermissionsBits(t *testing.T) { } assert.Len(t, r.files, 2) - assert.Equal(t, getPermissions(r, "script.sh"), fs.FileMode(0755)) - assert.Equal(t, getPermissions(r, "not-a-script"), fs.FileMode(0644)) + assert.Equal(t, getPermissions(r, "script.sh"), fs.FileMode(0o755)) + assert.Equal(t, getPermissions(r, "not-a-script"), fs.FileMode(0o644)) } func TestRendererErrorOnConflictingFile(t *testing.T) { @@ -537,7 +535,7 @@ func TestRendererErrorOnConflictingFile(t *testing.T) { skipPatterns: []string{}, files: []file{ &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "a", content: []byte("123"), }, @@ -563,7 +561,7 @@ func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { skipPatterns: []string{"a"}, files: []file{ &inMemoryFile{ - perm: 0444, + perm: 0o444, relPath: "a", content: []byte("123"), }, diff --git a/libs/template/templates/dbt-sql/template/{{.project_name}}/.vscode/settings.json.tmpl b/libs/template/templates/dbt-sql/template/{{.project_name}}/.vscode/settings.json.tmpl index 562ba136f..3eca01226 100644 --- a/libs/template/templates/dbt-sql/template/{{.project_name}}/.vscode/settings.json.tmpl +++ b/libs/template/templates/dbt-sql/template/{{.project_name}}/.vscode/settings.json.tmpl @@ -1,6 +1,5 @@ { "python.analysis.stubPath": ".vscode", - "databricks.python.envFile": "${workspaceFolder}/.env", "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------", "python.testing.pytestArgs": [ diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json index f19498daa..8ee87c30d 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json @@ -1,6 +1,5 @@ { "python.analysis.stubPath": ".vscode", - "databricks.python.envFile": "${workspaceFolder}/.env", "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ 
\\])", "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------", "python.testing.pytestArgs": [ diff --git a/libs/template/templates/default-sql/template/{{.project_name}}/.vscode/settings.json.tmpl b/libs/template/templates/default-sql/template/{{.project_name}}/.vscode/settings.json.tmpl index c63af24b4..03a365f9d 100644 --- a/libs/template/templates/default-sql/template/{{.project_name}}/.vscode/settings.json.tmpl +++ b/libs/template/templates/default-sql/template/{{.project_name}}/.vscode/settings.json.tmpl @@ -1,6 +1,5 @@ { "python.analysis.stubPath": ".vscode", - "databricks.python.envFile": "${workspaceFolder}/.env", "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------", "python.testing.pytestArgs": [ diff --git a/libs/textutil/textutil_test.go b/libs/textutil/textutil_test.go index f6834a1ef..b9268c98b 100644 --- a/libs/textutil/textutil_test.go +++ b/libs/textutil/textutil_test.go @@ -50,7 +50,8 @@ func TestNormalizeString(t *testing.T) { { input: ".test//test..test", expected: "test_test_test", - }} + }, + } for _, c := range cases { assert.Equal(t, c.expected, NormalizeString(c.input)) diff --git a/libs/vfs/leaf.go b/libs/vfs/leaf.go deleted file mode 100644 index 8c11f9039..000000000 --- a/libs/vfs/leaf.go +++ /dev/null @@ -1,29 +0,0 @@ -package vfs - -import ( - "errors" - "io/fs" -) - -// FindLeafInTree returns the first path that holds `name`, -// traversing up to the root of the filesystem, starting at `p`. -func FindLeafInTree(p Path, name string) (Path, error) { - for p != nil { - _, err := fs.Stat(p, name) - - // No error means we found the leaf in p. - if err == nil { - return p, nil - } - - // ErrNotExist means we continue traversal up the tree. - if errors.Is(err, fs.ErrNotExist) { - p = p.Parent() - continue - } - - return nil, err - } - - return nil, fs.ErrNotExist -} diff --git a/libs/vfs/leaf_test.go b/libs/vfs/leaf_test.go deleted file mode 100644 index da9412ec0..000000000 --- a/libs/vfs/leaf_test.go +++ /dev/null @@ -1,38 +0,0 @@ -package vfs - -import ( - "os" - "path/filepath" - "testing" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestFindLeafInTree(t *testing.T) { - wd, err := os.Getwd() - require.NoError(t, err) - - root := filepath.Join(wd, "..", "..") - - // Find from working directory should work. - { - out, err := FindLeafInTree(MustNew(wd), ".git") - assert.NoError(t, err) - assert.Equal(t, root, out.Native()) - } - - // Find from project root itself should work. - { - out, err := FindLeafInTree(MustNew(root), ".git") - assert.NoError(t, err) - assert.Equal(t, root, out.Native()) - } - - // Find for something that doesn't exist should work. - { - out, err := FindLeafInTree(MustNew(root), "this-leaf-doesnt-exist-anywhere") - assert.ErrorIs(t, err, os.ErrNotExist) - assert.Equal(t, nil, out) - } -}