From a7a109a5d87281f29beec3a12a05692e483fe551 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 14 Jul 2023 11:43:20 +0200 Subject: [PATCH 001/139] Remove base path checks during sync (#576) ## Changes Earlier we removed recursive deletion from sync. This makes it safe enough for us to not restrict sync to just the namespace of the user. This PR removes that base path validation. Note: If the sync destination is under `/Repos` we still only create missing directories required if the path is under my namespace ie matches `/Repos/@me/` ## Tests Manually Before: ``` shreyas.goenka@THW32HFW6T hello-bundle % cli bundle deploy Starting upload of bundle files Error: path must be nested under /Users/shreyas.goenka@databricks.com or /Repos/shreyas.goenka@databricks.com ``` After: ``` shreyas.goenka@THW32HFW6T hello-bundle % cli bundle deploy Starting upload of bundle files Uploaded bundle files at /Shared/common-test/hello-bundle/files! Starting resource deployment Resource deployment completed! ``` --- libs/sync/path.go | 41 ----------------------------------------- libs/sync/path_test.go | 31 ------------------------------- 2 files changed, 72 deletions(-) diff --git a/libs/sync/path.go b/libs/sync/path.go index 7fd1b9a97..a04c28d30 100644 --- a/libs/sync/path.go +++ b/libs/sync/path.go @@ -13,42 +13,6 @@ import ( "github.com/databricks/databricks-sdk-go/service/workspace" ) -// Return if the child path is nested under the parent path. -func isPathNestedUnder(child, parent string) bool { - child = path.Clean(child) - parent = path.Clean(parent) - - // Traverse up the tree as long as "child" is contained in "parent". - for len(child) > len(parent) && strings.HasPrefix(child, parent) { - child = path.Dir(child) - if child == parent { - return true - } - } - return false -} - -// Check if the specified path is nested under one of the allowed base paths. 
-func checkPathNestedUnderBasePaths(me *iam.User, p string) error { - validBasePaths := []string{ - path.Clean(fmt.Sprintf("/Users/%s", me.UserName)), - path.Clean(fmt.Sprintf("/Repos/%s", me.UserName)), - } - - givenBasePath := path.Clean(p) - match := false - for _, basePath := range validBasePaths { - if isPathNestedUnder(givenBasePath, basePath) { - match = true - break - } - } - if !match { - return fmt.Errorf("path must be nested under %s", strings.Join(validBasePaths, " or ")) - } - return nil -} - func repoPathForPath(me *iam.User, remotePath string) string { base := path.Clean(fmt.Sprintf("/Repos/%s", me.UserName)) remotePath = path.Clean(remotePath) @@ -66,11 +30,6 @@ func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClie return err } - err = checkPathNestedUnderBasePaths(me, remotePath) - if err != nil { - return err - } - // Ensure that the remote path exists. // If it is a repo, it has to exist. // If it is a workspace path, it may not exist. diff --git a/libs/sync/path_test.go b/libs/sync/path_test.go index 18475c926..2d492251f 100644 --- a/libs/sync/path_test.go +++ b/libs/sync/path_test.go @@ -7,37 +7,6 @@ import ( "github.com/stretchr/testify/assert" ) -func TestPathNestedUnderBasePaths(t *testing.T) { - me := iam.User{ - UserName: "jane@doe.com", - } - - // Not nested under allowed base paths. 
- assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/.")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/..")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/john@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.comsuffix/foo")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos/")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Repos")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/.")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/..")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/john@doe.com")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.comsuffix/foo")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users/")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/Users")) - assert.Error(t, checkPathNestedUnderBasePaths(&me, "/")) - - // Nested under allowed base paths. 
- assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/./foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Repos/jane@doe.com/foo/bar/qux")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/./foo")) - assert.NoError(t, checkPathNestedUnderBasePaths(&me, "/Users/jane@doe.com/foo/bar/qux")) -} - func TestPathToRepoPath(t *testing.T) { me := iam.User{ UserName: "jane@doe.com", From 8cfb1c133ec39a5fb95a5426a118d798c830545c Mon Sep 17 00:00:00 2001 From: Fabian Jakobs Date: Tue, 18 Jul 2023 12:16:34 +0200 Subject: [PATCH 002/139] First look for databricks.yml before falling back to bundle.yml (#580) ## Changes * Add support for using `databricks.yml` as config file. If `databricks.yml` is not found then falling back to `bundle.yml` for backwards compatibility. * Add support for `.yaml` extension. 
* Give an error when more than one config file is found ## Tests * added unit test * manual testing the different cases --------- Co-authored-by: Pieter Noordhuis --- bundle/bundle.go | 6 +- bundle/bundle_test.go | 4 +- .../config/mutator/process_root_includes.go | 6 +- .../mutator/process_root_includes_test.go | 2 +- bundle/config/root.go | 40 ++++++++-- bundle/config/root_test.go | 73 +++++++++++++++++-- .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 bundle/root.go | 12 ++- bundle/root_test.go | 4 +- .../{bundle.yml => databricks.yml} | 0 .../basic/{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 bundle/tests/conflicting_resource_ids_test.go | 4 +- .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 bundle/tests/job_and_pipeline_test.go | 8 +- .../{bundle.yml => databricks.yml} | 0 .../vanilla/{bundle.yml => databricks.yml} | 0 .../{bundle.yml => databricks.yml} | 0 cmd/sync/sync.go | 2 +- 29 files changed, 132 insertions(+), 29 deletions(-) rename bundle/config/testdata/duplicate_resource_name_in_subconfiguration/{bundle.yml => databricks.yml} (100%) rename bundle/config/testdata/duplicate_resource_names_in_root/{bundle.yml => databricks.yml} (100%) rename bundle/tests/autoload_git/{bundle.yml => databricks.yml} (100%) rename bundle/tests/basic/{bundle.yml => databricks.yml} (100%) rename bundle/tests/conflicting_resource_ids/no_subconfigurations/{bundle.yml => databricks.yml} (100%) rename bundle/tests/conflicting_resource_ids/one_subconfiguration/{bundle.yml => databricks.yml} (100%) rename bundle/tests/conflicting_resource_ids/two_subconfigurations/{bundle.yml => 
databricks.yml} (100%) rename bundle/tests/environment_empty/{bundle.yml => databricks.yml} (100%) rename bundle/tests/environment_overrides/{bundle.yml => databricks.yml} (100%) rename bundle/tests/include_default/{bundle.yml => databricks.yml} (100%) rename bundle/tests/include_invalid/{bundle.yml => databricks.yml} (100%) rename bundle/tests/include_override/{bundle.yml => databricks.yml} (100%) rename bundle/tests/include_with_glob/{bundle.yml => databricks.yml} (100%) rename bundle/tests/interpolation/{bundle.yml => databricks.yml} (100%) rename bundle/tests/job_and_pipeline/{bundle.yml => databricks.yml} (100%) rename bundle/tests/variables/env_overrides/{bundle.yml => databricks.yml} (100%) rename bundle/tests/variables/vanilla/{bundle.yml => databricks.yml} (100%) rename bundle/tests/yaml_anchors/{bundle.yml => databricks.yml} (100%) diff --git a/bundle/bundle.go b/bundle/bundle.go index 02d0eaac9..81fdfd4a8 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -45,7 +45,11 @@ type Bundle struct { func Load(path string) (*Bundle, error) { bundle := &Bundle{} - err := bundle.Config.Load(filepath.Join(path, config.FileName)) + configFile, err := config.FileNames.FindInPath(path) + if err != nil { + return nil, err + } + err = bundle.Config.Load(configFile) if err != nil { return nil, err } diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index 5a26d3508..18550f4f2 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -23,7 +23,7 @@ func TestLoadExists(t *testing.T) { func TestBundleCacheDir(t *testing.T) { projectDir := t.TempDir() - f1, err := os.Create(filepath.Join(projectDir, "bundle.yml")) + f1, err := os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() @@ -47,7 +47,7 @@ func TestBundleCacheDir(t *testing.T) { func TestBundleCacheDirOverride(t *testing.T) { projectDir := t.TempDir() bundleTmpDir := t.TempDir() - f1, err := os.Create(filepath.Join(projectDir, "bundle.yml")) + f1, err := 
os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 454e3a987..f3717ce01 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -27,8 +27,10 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error var out []bundle.Mutator // Map with files we've already seen to avoid loading them twice. - var seen = map[string]bool{ - config.FileName: true, + var seen = map[string]bool{} + + for _, file := range config.FileNames { + seen[file] = true } // Maintain list of files in order of files being loaded. diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index c7d00d88b..9ca5335ac 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -61,7 +61,7 @@ func TestProcessRootIncludesSingleGlob(t *testing.T) { }, } - touch(t, bundle.Config.Path, "bundle.yml") + touch(t, bundle.Config.Path, "databricks.yml") touch(t, bundle.Config.Path, "a.yml") touch(t, bundle.Config.Path, "b.yml") diff --git a/bundle/config/root.go b/bundle/config/root.go index 5ee337d30..28b1a6158 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -11,12 +11,39 @@ import ( "github.com/imdario/mergo" ) -// FileName is the name of bundle configuration file. -const FileName = "bundle.yml" +type ConfigFileNames []string + +// FileNames contains allowed names of bundle configuration files. 
+var FileNames = ConfigFileNames{"databricks.yml", "databricks.yaml", "bundle.yml", "bundle.yaml"} + +func (c ConfigFileNames) FindInPath(path string) (string, error) { + result := "" + var firstErr error + + for _, file := range c { + filePath := filepath.Join(path, file) + _, err := os.Stat(filePath) + if err == nil { + if result != "" { + return "", fmt.Errorf("multiple bundle root configuration files found in %s", path) + } + result = filePath + } else { + if firstErr == nil { + firstErr = err + } + } + } + + if result == "" { + return "", firstErr + } + return result, nil +} type Root struct { // Path contains the directory path to the root of the bundle. - // It is set when loading `bundle.yml`. + // It is set when loading `databricks.yml`. Path string `json:"-" bundle:"readonly"` // Contains user defined variables @@ -27,7 +54,7 @@ type Root struct { Bundle Bundle `json:"bundle"` // Include specifies a list of patterns of file names to load and - // merge into the this configuration. If not set in `bundle.yml`, + // merge into the this configuration. If not set in `databricks.yml`, // it defaults to loading `*.yml` and `*/*.yml`. // // Also see [mutator.DefineDefaultInclude]. @@ -62,7 +89,10 @@ func Load(path string) (*Root, error) { // If we were given a directory, assume this is the bundle root. 
if stat.IsDir() { - path = filepath.Join(path, FileName) + path, err = FileNames.FindInPath(path) + if err != nil { + return nil, err + } } if err := r.Load(path); err != nil { diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 818e89a2d..531ffcec1 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -2,7 +2,11 @@ package config import ( "encoding/json" + "os" + "path/filepath" "reflect" + "runtime" + "strings" "testing" "github.com/databricks/cli/bundle/config/variable" @@ -26,7 +30,7 @@ func TestRootMarshalUnmarshal(t *testing.T) { func TestRootLoad(t *testing.T) { root := &Root{} - err := root.Load("../tests/basic/bundle.yml") + err := root.Load("../tests/basic/databricks.yml") require.NoError(t, err) assert.Equal(t, "basic", root.Bundle.Name) } @@ -78,13 +82,13 @@ func TestRootMergeMap(t *testing.T) { func TestDuplicateIdOnLoadReturnsError(t *testing.T) { root := &Root{} - err := root.Load("./testdata/duplicate_resource_names_in_root/bundle.yml") - assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/bundle.yml, pipeline at ./testdata/duplicate_resource_names_in_root/bundle.yml)") + err := root.Load("./testdata/duplicate_resource_names_in_root/databricks.yml") + assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_names_in_root/databricks.yml, pipeline at ./testdata/duplicate_resource_names_in_root/databricks.yml)") } func TestDuplicateIdOnMergeReturnsError(t *testing.T) { root := &Root{} - err := root.Load("./testdata/duplicate_resource_name_in_subconfiguration/bundle.yml") + err := root.Load("./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml") require.NoError(t, err) other := &Root{} @@ -92,7 +96,7 @@ func TestDuplicateIdOnMergeReturnsError(t *testing.T) { require.NoError(t, err) err = root.Merge(other) - assert.ErrorContains(t, err, "multiple resources named foo (job at 
./testdata/duplicate_resource_name_in_subconfiguration/bundle.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") + assert.ErrorContains(t, err, "multiple resources named foo (job at ./testdata/duplicate_resource_name_in_subconfiguration/databricks.yml, pipeline at ./testdata/duplicate_resource_name_in_subconfiguration/resources.yml)") } func TestInitializeVariables(t *testing.T) { @@ -163,3 +167,62 @@ func TestRootMergeEnvironmentWithMode(t *testing.T) { require.NoError(t, root.MergeEnvironment(env)) assert.Equal(t, Development, root.Bundle.Mode) } + +func TestConfigFileNames_FindInPath(t *testing.T) { + testCases := []struct { + name string + files []string + expected string + err string + }{ + { + name: "file found", + files: []string{"databricks.yml"}, + expected: "BASE/databricks.yml", + err: "", + }, + { + name: "file found", + files: []string{"bundle.yml"}, + expected: "BASE/bundle.yml", + err: "", + }, + { + name: "multiple files found", + files: []string{"databricks.yaml", "bundle.yml"}, + expected: "", + err: "multiple bundle root configuration files found", + }, + { + name: "file not found", + files: []string{}, + expected: "", + err: "no such file or directory", + }, + } + + if runtime.GOOS == "windows" { + testCases[3].err = "The system cannot find the file specified." 
+ } + + for _, tc := range testCases { + t.Run(tc.name, func(t *testing.T) { + projectDir := t.TempDir() + for _, file := range tc.files { + f1, _ := os.Create(filepath.Join(projectDir, file)) + f1.Close() + } + + result, err := FileNames.FindInPath(projectDir) + + expected := strings.Replace(tc.expected, "BASE/", projectDir+string(os.PathSeparator), 1) + assert.Equal(t, expected, result) + + if tc.err != "" { + assert.ErrorContains(t, err, tc.err) + } else { + assert.NoError(t, err) + } + }) + } +} diff --git a/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/bundle.yml b/bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_name_in_subconfiguration/bundle.yml rename to bundle/config/testdata/duplicate_resource_name_in_subconfiguration/databricks.yml diff --git a/bundle/config/testdata/duplicate_resource_names_in_root/bundle.yml b/bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml similarity index 100% rename from bundle/config/testdata/duplicate_resource_names_in_root/bundle.yml rename to bundle/config/testdata/duplicate_resource_names_in_root/databricks.yml diff --git a/bundle/root.go b/bundle/root.go index 70d778e15..46f63e134 100644 --- a/bundle/root.go +++ b/bundle/root.go @@ -36,11 +36,15 @@ func getRootWithTraversal() (string, error) { if err != nil { return "", err } - path, err := folders.FindDirWithLeaf(wd, config.FileName) - if err != nil { - return "", fmt.Errorf(`unable to locate bundle root: %s not found`, config.FileName) + + for _, file := range config.FileNames { + path, err := folders.FindDirWithLeaf(wd, file) + if err == nil { + return path, nil + } } - return path, nil + + return "", fmt.Errorf(`unable to locate bundle root: %s not found`, config.FileNames[0]) } // mustGetRoot returns a bundle root or an error if one cannot be found. 
diff --git a/bundle/root_test.go b/bundle/root_test.go index dab002256..2f8304921 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -76,8 +76,8 @@ func TestRootLookup(t *testing.T) { chdir(t, t.TempDir()) - // Create bundle.yml file. - f, err := os.Create(config.FileName) + // Create databricks.yml file. + f, err := os.Create(config.FileNames[0]) require.NoError(t, err) defer f.Close() diff --git a/bundle/tests/autoload_git/bundle.yml b/bundle/tests/autoload_git/databricks.yml similarity index 100% rename from bundle/tests/autoload_git/bundle.yml rename to bundle/tests/autoload_git/databricks.yml diff --git a/bundle/tests/basic/bundle.yml b/bundle/tests/basic/databricks.yml similarity index 100% rename from bundle/tests/basic/bundle.yml rename to bundle/tests/basic/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids/no_subconfigurations/bundle.yml b/bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/no_subconfigurations/bundle.yml rename to bundle/tests/conflicting_resource_ids/no_subconfigurations/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids/one_subconfiguration/bundle.yml b/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/one_subconfiguration/bundle.yml rename to bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/bundle.yml b/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml similarity index 100% rename from bundle/tests/conflicting_resource_ids/two_subconfigurations/bundle.yml rename to bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go index 12f460fde..b75e3753f 100644 --- 
a/bundle/tests/conflicting_resource_ids_test.go +++ b/bundle/tests/conflicting_resource_ids_test.go @@ -14,7 +14,7 @@ import ( func TestConflictingResourceIdsNoSubconfig(t *testing.T) { _, err := bundle.Load("./conflicting_resource_ids/no_subconfigurations") - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/bundle.yml") + bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) } @@ -22,7 +22,7 @@ func TestConflictingResourceIdsOneSubconfig(t *testing.T) { b, err := bundle.Load("./conflicting_resource_ids/one_subconfiguration") require.NoError(t, err) err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) - bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/bundle.yml") + bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") resourcesConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) } diff --git a/bundle/tests/environment_empty/bundle.yml b/bundle/tests/environment_empty/databricks.yml similarity index 100% rename from bundle/tests/environment_empty/bundle.yml rename to bundle/tests/environment_empty/databricks.yml diff --git a/bundle/tests/environment_overrides/bundle.yml b/bundle/tests/environment_overrides/databricks.yml similarity index 100% rename from bundle/tests/environment_overrides/bundle.yml rename to bundle/tests/environment_overrides/databricks.yml diff --git a/bundle/tests/include_default/bundle.yml b/bundle/tests/include_default/databricks.yml similarity index 100% rename from bundle/tests/include_default/bundle.yml rename to 
bundle/tests/include_default/databricks.yml diff --git a/bundle/tests/include_invalid/bundle.yml b/bundle/tests/include_invalid/databricks.yml similarity index 100% rename from bundle/tests/include_invalid/bundle.yml rename to bundle/tests/include_invalid/databricks.yml diff --git a/bundle/tests/include_override/bundle.yml b/bundle/tests/include_override/databricks.yml similarity index 100% rename from bundle/tests/include_override/bundle.yml rename to bundle/tests/include_override/databricks.yml diff --git a/bundle/tests/include_with_glob/bundle.yml b/bundle/tests/include_with_glob/databricks.yml similarity index 100% rename from bundle/tests/include_with_glob/bundle.yml rename to bundle/tests/include_with_glob/databricks.yml diff --git a/bundle/tests/interpolation/bundle.yml b/bundle/tests/interpolation/databricks.yml similarity index 100% rename from bundle/tests/interpolation/bundle.yml rename to bundle/tests/interpolation/databricks.yml diff --git a/bundle/tests/job_and_pipeline/bundle.yml b/bundle/tests/job_and_pipeline/databricks.yml similarity index 100% rename from bundle/tests/job_and_pipeline/bundle.yml rename to bundle/tests/job_and_pipeline/databricks.yml diff --git a/bundle/tests/job_and_pipeline_test.go b/bundle/tests/job_and_pipeline_test.go index 775f415c2..d92eabd3b 100644 --- a/bundle/tests/job_and_pipeline_test.go +++ b/bundle/tests/job_and_pipeline_test.go @@ -15,7 +15,7 @@ func TestJobAndPipelineDevelopment(t *testing.T) { assert.Len(t, b.Config.Resources.Pipelines, 1) p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.Equal(t, b.Config.Bundle.Mode, config.Development) assert.True(t, p.Development) require.Len(t, p.Libraries, 1) @@ -29,7 +29,7 @@ func TestJobAndPipelineStaging(t *testing.T) { assert.Len(t, b.Config.Resources.Pipelines, 1) p := 
b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.False(t, p.Development) require.Len(t, p.Libraries, 1) assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) @@ -42,14 +42,14 @@ func TestJobAndPipelineProduction(t *testing.T) { assert.Len(t, b.Config.Resources.Pipelines, 1) p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) assert.False(t, p.Development) require.Len(t, p.Libraries, 1) assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) assert.Equal(t, "nyc_taxi_production", p.Target) j := b.Config.Resources.Jobs["pipeline_schedule"] - assert.Equal(t, "job_and_pipeline/bundle.yml", filepath.ToSlash(j.ConfigFilePath)) + assert.Equal(t, "job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath)) assert.Equal(t, "Daily refresh of production pipeline", j.Name) require.Len(t, j.Tasks, 1) assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId) diff --git a/bundle/tests/variables/env_overrides/bundle.yml b/bundle/tests/variables/env_overrides/databricks.yml similarity index 100% rename from bundle/tests/variables/env_overrides/bundle.yml rename to bundle/tests/variables/env_overrides/databricks.yml diff --git a/bundle/tests/variables/vanilla/bundle.yml b/bundle/tests/variables/vanilla/databricks.yml similarity index 100% rename from bundle/tests/variables/vanilla/bundle.yml rename to bundle/tests/variables/vanilla/databricks.yml diff --git a/bundle/tests/yaml_anchors/bundle.yml b/bundle/tests/yaml_anchors/databricks.yml similarity index 100% rename from bundle/tests/yaml_anchors/bundle.yml rename to bundle/tests/yaml_anchors/databricks.yml diff --git a/cmd/sync/sync.go 
b/cmd/sync/sync.go index d13a85d03..51d71ea2f 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -72,7 +72,7 @@ var syncCmd = &cobra.Command{ // // To be uncommented and used once our VS Code extension is bundle aware. - // Until then, this could interfere with extension usage where a `bundle.yml` file is present. + // Until then, this could interfere with extension usage where a `databricks.yml` file is present. // See https://github.com/databricks/cli/pull/207. // // b := bundle.GetOrNil(cmd.Context()) From 65d8fe13e9da76809d103782c82a352f5f194cf3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Jul 2023 15:30:00 +0000 Subject: [PATCH 003/139] Bump github.com/databricks/databricks-sdk-go from 0.12.0 to 0.13.0 (#585) Bumps [github.com/databricks/databricks-sdk-go](https://github.com/databricks/databricks-sdk-go) from 0.12.0 to 0.13.0.
Release notes

Sourced from github.com/databricks/databricks-sdk-go's releases.

v0.13.0

  • Add issue templates (#539).
  • Added HasRequiredNonBodyField method (#536).
  • Make Azure MSI auth account compatible (#544).
  • Refactor Handling of NameID Mapping in OpenAPI Generator (#547).
  • Regenerate Go SDK from current OpenAPI Specification (#549).
  • Parse Camel Case and Pascal Case Enum Values (#550).
  • Prepare for auto-releaser infra (#554).
  • Added SCIM Patch Acceptance Tests (#540).

API Changes:

... (truncated)

Changelog

Sourced from github.com/databricks/databricks-sdk-go's changelog.

0.13.0

  • Add issue templates (#539).
  • Added HasRequiredNonBodyField method (#536).
  • Make Azure MSI auth account compatible (#544).
  • Refactor Handling of NameID Mapping in OpenAPI Generator (#547).
  • Regenerate Go SDK from current OpenAPI Specification (#549).
  • Parse Camel Case and Pascal Case Enum Values (#550).
  • Prepare for auto-releaser infra (#554).
  • Added SCIM Patch Acceptance Tests (#540).

API Changes:

... (truncated)

Commits
  • b4fb746 Release v0.13.0 (#555)
  • 180c7ee Added SCIM Patch Acceptance Tests (#540)
  • 546814a Prepare for auto-releaser infra (#554)
  • 7e680c5 Parse Camel Case and Pascal Case Enum Values (#550)
  • e71ece4 Bump google.golang.org/api from 0.130.0 to 0.131.0 (#551)
  • 3b4492b Regenerate Go SDK from current OpenAPI Specification (#549)
  • f84de61 Refactor Handling of Name<->ID Mapping in OpenAPI Generator (#547)
  • c37a894 Bump google.golang.org/api from 0.129.0 to 0.130.0 (#542)
  • 4f2aa38 Bump golang.org/x/mod from 0.11.0 to 0.12.0 (#541)
  • e80f6e1 Make Azure MSI auth account compatible (#544)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github.com/databricks/databricks-sdk-go&package-manager=go_modules&previous-version=0.12.0&new-version=0.13.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
--------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Serge Smertin --- bundle/deploy/terraform/convert_test.go | 2 +- cmd/workspace/metastores/metastores.go | 30 ++++++++++++------------- go.mod | 8 +++---- go.sum | 18 +++++++-------- 4 files changed, 29 insertions(+), 29 deletions(-) diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 76cec4e79..c47824ec5 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -26,7 +26,7 @@ func TestConvertJob(t *testing.T) { }, }, GitSource: &jobs.GitSource{ - GitProvider: jobs.GitProviderGithub, + GitProvider: jobs.GitProviderGitHub, GitUrl: "https://github.com/foo/bar", }, }, diff --git a/cmd/workspace/metastores/metastores.go b/cmd/workspace/metastores/metastores.go index e16f74ff1..1f27e0b8b 100755 --- a/cmd/workspace/metastores/metastores.go +++ b/cmd/workspace/metastores/metastores.go @@ -20,10 +20,10 @@ var Cmd = &cobra.Command{ Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a @@ -46,7 +46,7 @@ var assignCmd = &cobra.Command{ Use: "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID", Short: `Create an assignment.`, Long: `Create an assignment. - + Creates a new metastore assignment. 
If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin.`, @@ -96,7 +96,7 @@ var createCmd = &cobra.Command{ Use: "create NAME STORAGE_ROOT", Short: `Create a metastore.`, Long: `Create a metastore. - + Creates a new metastore based on a provided name and storage root path.`, Annotations: map[string]string{}, @@ -144,7 +144,7 @@ var currentCmd = &cobra.Command{ Use: "current", Short: `Get metastore assignment for workspace.`, Long: `Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed.`, Annotations: map[string]string{}, @@ -178,7 +178,7 @@ var deleteCmd = &cobra.Command{ Use: "delete ID", Short: `Delete a metastore.`, Long: `Delete a metastore. - + Deletes a metastore. The caller must be a metastore admin.`, Annotations: map[string]string{}, @@ -230,7 +230,7 @@ var getCmd = &cobra.Command{ Use: "get ID", Short: `Get a metastore.`, Long: `Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info.`, @@ -281,7 +281,7 @@ var listCmd = &cobra.Command{ Use: "list", Short: `List metastores.`, Long: `List metastores. - + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.`, @@ -303,7 +303,7 @@ var listCmd = &cobra.Command{ } // start maintenance command -var maintenanceReq catalog.UpdateAutoMaintenance +var maintenanceReq catalog.UpdatePredictiveOptimization var maintenanceJson flags.JsonFlag func init() { @@ -317,7 +317,7 @@ var maintenanceCmd = &cobra.Command{ Use: "maintenance METASTORE_ID ENABLE", Short: `Enables or disables auto maintenance on the metastore.`, Long: `Enables or disables auto maintenance on the metastore. 
- + Enables or disables auto maintenance on the metastore.`, // This command is being previewed; hide from help output. @@ -349,7 +349,7 @@ var maintenanceCmd = &cobra.Command{ } } - response, err := w.Metastores.Maintenance(ctx, maintenanceReq) + response, err := w.Metastores.EnableOptimization(ctx, maintenanceReq) if err != nil { return err } @@ -371,7 +371,7 @@ var summaryCmd = &cobra.Command{ Use: "summary", Short: `Get a metastore summary.`, Long: `Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID.`, @@ -404,7 +404,7 @@ var unassignCmd = &cobra.Command{ Use: "unassign WORKSPACE_ID METASTORE_ID", Short: `Delete an assignment.`, Long: `Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator.`, Annotations: map[string]string{}, @@ -455,7 +455,7 @@ var updateCmd = &cobra.Command{ Use: "update ID", Short: `Update a metastore.`, Long: `Update a metastore. - + Updates information for a specific metastore. The caller must be a metastore admin.`, @@ -511,7 +511,7 @@ var updateAssignmentCmd = &cobra.Command{ Use: "update-assignment WORKSPACE_ID", Short: `Update an assignment.`, Long: `Update an assignment. - + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. 
The caller must be an account admin diff --git a/go.mod b/go.mod index 610404bb4..b839d11bf 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.18 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.12.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.13.0 // Apache 2.0 github.com/fatih/color v1.15.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.3.0 // BSD-3-Clause @@ -54,10 +54,10 @@ require ( golang.org/x/net v0.12.0 // indirect golang.org/x/sys v0.10.0 // indirect golang.org/x/time v0.3.0 // indirect - google.golang.org/api v0.129.0 // indirect + google.golang.org/api v0.131.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc // indirect - google.golang.org/grpc v1.56.1 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130 // indirect + google.golang.org/grpc v1.56.2 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index 176d39254..1071343e5 100644 --- a/go.sum +++ b/go.sum @@ -34,8 +34,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/databricks/databricks-sdk-go v0.12.0 h1:VgMJpvEiyRRrJ0mQx22Rkc73zjxUe125Ou9c5C99phM= -github.com/databricks/databricks-sdk-go v0.12.0/go.mod h1:h/oWnnfWcJQAotAhZS/GMnlcaE/8WhuZ5Vj7el/6Gn8= +github.com/databricks/databricks-sdk-go v0.13.0 h1:Npi4laUUmcOPDPdJf2ZMGFUtybpf4LK6n5NQY56Ya2Q= +github.com/databricks/databricks-sdk-go 
v0.13.0/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -93,7 +93,7 @@ github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM= github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w= -github.com/googleapis/gax-go/v2 v2.11.0 h1:9V9PWXEsWnPpQhu/PeQIkS4eGzMlTLGgt80cUUI8Ki4= +github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= @@ -243,8 +243,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.129.0 h1:2XbdjjNfFPXQyufzQVwPf1RRnHH8Den2pfNE2jw7L8w= -google.golang.org/api v0.129.0/go.mod h1:dFjiXlanKwWE3612X97llhsoI36FAoIiRj3aTl5b/zE= +google.golang.org/api v0.131.0 h1:AcgWS2edQ4chVEt/SxgDKubVu/9/idCJy00tBGuGB4M= +google.golang.org/api v0.131.0/go.mod h1:7vtkbKv2REjJbxmHSkBTBQ5LUGvPdAqjjvt84XAfhpA= google.golang.org/appengine v1.1.0/go.mod 
h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= @@ -253,8 +253,8 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc h1:XSJ8Vk1SWuNr8S18z1NZSziL0CPIXLCCMDOEFtHBOFc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230530153820-e85fd2cbaebc/go.mod h1:66JfowdXAEgad5O9NnYcsNPLCPZJD++2L9X0PCMODrA= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130 h1:2FZP5XuJY9zQyGM5N0rtovnoXjiMUEIUMvw0m9wlpLc= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:8mL13HKkDa+IuJ8yruA3ci0q+0vsUz4m//+ottjwS5o= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -263,8 +263,8 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.56.1 h1:z0dNfjIl0VpaZ9iSVjA6daGatAYwPGstTjt5vkRMFkQ= -google.golang.org/grpc v1.56.1/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc 
v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= +google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= From ff980962088753ef2fc53b3a3acdf9efe094fd48 Mon Sep 17 00:00:00 2001 From: Serge Smertin <259697+nfx@users.noreply.github.com> Date: Tue, 18 Jul 2023 17:48:35 +0200 Subject: [PATCH 004/139] Integrate with auto-release infra (#581) ## Changes - added changelog template - added `toolchain` to `.codegen.json` ## Tests none --- .codegen.json | 3 +++ .codegen/changelog.md.tmpl | 29 +++++++++++++++++++++++++++++ 2 files changed, 32 insertions(+) create mode 100644 .codegen/changelog.md.tmpl diff --git a/.codegen.json b/.codegen.json index cd1fa12ed..da4f3dd61 100644 --- a/.codegen.json +++ b/.codegen.json @@ -6,5 +6,8 @@ "batch": { ".codegen/cmds-workspace.go.tmpl": "cmd/workspace/cmd.go", ".codegen/cmds-account.go.tmpl": "cmd/account/cmd.go" + }, + "toolchain": { + "required": ["go"] } } diff --git a/.codegen/changelog.md.tmpl b/.codegen/changelog.md.tmpl new file mode 100644 index 000000000..83f1b7712 --- /dev/null +++ b/.codegen/changelog.md.tmpl @@ -0,0 +1,29 @@ +# Version changelog + +## {{.Version}} + +{{range .Changes -}} + * {{.}}. +{{end}}{{- if .ApiChanges}} +API Changes: +{{range .ApiChanges}}{{if or (eq .X "method") (eq .X "service")}} + * {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}. +{{- end}}{{- end}} + +OpenAPI SHA: {{.Sha}}, Date: {{.Changed}} +{{- end}}{{if .DependencyUpdates}} +Dependency updates: +{{range .DependencyUpdates}} + * {{.}}. 
+{{- end -}} +{{end}} + +## {{.PrevVersion}} + +{{- define "what" -}} + {{if eq .X "service" -}} + `databricks {{if .Service.IsAccounts}}account {{end -}}{{(.Service.TrimPrefix "account").KebabName}}` command group + {{- else if eq .X "method" -}} + `databricks {{if .Method.Service.IsAccounts}}account {{end -}}{{(.Method.Service.TrimPrefix "account").KebabName}} {{.Method.KebabName}}` command + {{- end}} +{{- end -}} From acf292da37172986b839dd648fc756c3fb3666cb Mon Sep 17 00:00:00 2001 From: Serge Smertin <259697+nfx@users.noreply.github.com> Date: Tue, 18 Jul 2023 19:13:48 +0200 Subject: [PATCH 005/139] Release v0.201.0 (#586) * Add development runs ([#522](https://github.com/databricks/cli/pull/522)). * Support tab completion for profiles ([#572](https://github.com/databricks/cli/pull/572)). * Correctly use --profile flag passed for all bundle commands ([#571](https://github.com/databricks/cli/pull/571)). * Disallow notebooks in paths where files are expected ([#573](https://github.com/databricks/cli/pull/573)). * Improve auth login experience ([#570](https://github.com/databricks/cli/pull/570)). * Remove base path checks during sync ([#576](https://github.com/databricks/cli/pull/576)). * First look for databricks.yml before falling back to bundle.yml ([#580](https://github.com/databricks/cli/pull/580)). * Integrate with auto-release infra ([#581](https://github.com/databricks/cli/pull/581)). API Changes: * Removed `databricks metastores maintenance` command. * Added `databricks metastores enable-optimization` command. * Added `databricks tables update` command. * Changed `databricks account settings delete-personal-compute-setting` command with new required argument order. * Changed `databricks account settings read-personal-compute-setting` command with new required argument order. * Added `databricks clean-rooms` command group. 
OpenAPI SHA: 850a075ed9758d21a6bc4409506b48c8b9f93ab4, Date: 2023-07-18 Dependency updates: * Bump golang.org/x/term from 0.9.0 to 0.10.0 ([#567](https://github.com/databricks/cli/pull/567)). * Bump golang.org/x/oauth2 from 0.9.0 to 0.10.0 ([#566](https://github.com/databricks/cli/pull/566)). * Bump golang.org/x/mod from 0.11.0 to 0.12.0 ([#568](https://github.com/databricks/cli/pull/568)). * Bump github.com/databricks/databricks-sdk-go from 0.12.0 to 0.13.0 ([#585](https://github.com/databricks/cli/pull/585)). --- .codegen/_openapi_sha | 1 + .gitattributes | 1 + CHANGELOG.md | 28 ++ cmd/account/groups/groups.go | 3 +- cmd/account/metastores/metastores.go | 2 + .../service-principals/service-principals.go | 3 +- cmd/account/settings/settings.go | 58 ++-- .../storage-credentials.go | 2 + cmd/account/users/users.go | 3 +- cmd/workspace/alerts/alerts.go | 2 +- cmd/workspace/clean-rooms/clean-rooms.go | 258 ++++++++++++++++++ cmd/workspace/cmd.go | 3 + cmd/workspace/dashboards/dashboards.go | 6 +- cmd/workspace/experiments/experiments.go | 2 +- cmd/workspace/groups/groups.go | 3 +- .../instance-profiles/instance-profiles.go | 4 +- cmd/workspace/jobs/jobs.go | 22 +- cmd/workspace/metastores/metastores.go | 140 +++++----- cmd/workspace/pipelines/pipelines.go | 20 +- .../policy-families/policy-families.go | 12 +- cmd/workspace/queries/queries.go | 16 +- .../service-principals/service-principals.go | 3 +- cmd/workspace/tables/tables.go | 61 +++++ cmd/workspace/users/users.go | 3 +- 24 files changed, 482 insertions(+), 174 deletions(-) create mode 100644 .codegen/_openapi_sha create mode 100755 cmd/workspace/clean-rooms/clean-rooms.go diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha new file mode 100644 index 000000000..ae66d336a --- /dev/null +++ b/.codegen/_openapi_sha @@ -0,0 +1 @@ +850a075ed9758d21a6bc4409506b48c8b9f93ab4 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 8b95da207..714d6c53a 100755 --- a/.gitattributes +++ 
b/.gitattributes @@ -25,6 +25,7 @@ cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true cmd/account/workspaces/workspaces.go linguist-generated=true cmd/workspace/alerts/alerts.go linguist-generated=true cmd/workspace/catalogs/catalogs.go linguist-generated=true +cmd/workspace/clean-rooms/clean-rooms.go linguist-generated=true cmd/workspace/cluster-policies/cluster-policies.go linguist-generated=true cmd/workspace/clusters/clusters.go linguist-generated=true cmd/workspace/cmd.go linguist-generated=true diff --git a/CHANGELOG.md b/CHANGELOG.md index 0a7ed72bd..8c045ef0b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,33 @@ # Version changelog +## 0.201.0 + +* Add development runs ([#522](https://github.com/databricks/cli/pull/522)). +* Support tab completion for profiles ([#572](https://github.com/databricks/cli/pull/572)). +* Correctly use --profile flag passed for all bundle commands ([#571](https://github.com/databricks/cli/pull/571)). +* Disallow notebooks in paths where files are expected ([#573](https://github.com/databricks/cli/pull/573)). +* Improve auth login experience ([#570](https://github.com/databricks/cli/pull/570)). +* Remove base path checks during sync ([#576](https://github.com/databricks/cli/pull/576)). +* First look for databricks.yml before falling back to bundle.yml ([#580](https://github.com/databricks/cli/pull/580)). +* Integrate with auto-release infra ([#581](https://github.com/databricks/cli/pull/581)). + +API Changes: + + * Removed `databricks metastores maintenance` command. + * Added `databricks metastores enable-optimization` command. + * Added `databricks tables update` command. + * Changed `databricks account settings delete-personal-compute-setting` command with new required argument order. + * Changed `databricks account settings read-personal-compute-setting` command with new required argument order. + * Added `databricks clean-rooms` command group. 
+ +OpenAPI SHA: 850a075ed9758d21a6bc4409506b48c8b9f93ab4, Date: 2023-07-18 +Dependency updates: + + * Bump golang.org/x/term from 0.9.0 to 0.10.0 ([#567](https://github.com/databricks/cli/pull/567)). + * Bump golang.org/x/oauth2 from 0.9.0 to 0.10.0 ([#566](https://github.com/databricks/cli/pull/566)). + * Bump golang.org/x/mod from 0.11.0 to 0.12.0 ([#568](https://github.com/databricks/cli/pull/568)). + * Bump github.com/databricks/databricks-sdk-go from 0.12.0 to 0.13.0 ([#585](https://github.com/databricks/cli/pull/585)). + ## 0.200.2 CLI: diff --git a/cmd/account/groups/groups.go b/cmd/account/groups/groups.go index 5897ef292..55d0c7810 100755 --- a/cmd/account/groups/groups.go +++ b/cmd/account/groups/groups.go @@ -258,7 +258,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } diff --git a/cmd/account/metastores/metastores.go b/cmd/account/metastores/metastores.go index decbb4060..89e1c8f2e 100755 --- a/cmd/account/metastores/metastores.go +++ b/cmd/account/metastores/metastores.go @@ -80,6 +80,8 @@ func init() { Cmd.AddCommand(deleteCmd) // TODO: short flags + deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) + } var deleteCmd = &cobra.Command{ diff --git a/cmd/account/service-principals/service-principals.go b/cmd/account/service-principals/service-principals.go index 6ed4a69ab..55b7492ff 100755 --- a/cmd/account/service-principals/service-principals.go +++ b/cmd/account/service-principals/service-principals.go @@ -257,7 +257,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } diff --git a/cmd/account/settings/settings.go 
b/cmd/account/settings/settings.go index 8c507c3f6..c55c7ad62 100755 --- a/cmd/account/settings/settings.go +++ b/cmd/account/settings/settings.go @@ -12,8 +12,16 @@ import ( var Cmd = &cobra.Command{ Use: "settings", - Short: `TBD.`, - Long: `TBD`, + Short: `The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.`, + Long: `The Personal Compute enablement setting lets you control which users can use + the Personal Compute default policy to create compute resources. By default + all users in all workspaces have access (ON), but you can change the setting + to instead let individual workspaces configure access control (DELEGATE). + + There is only one instance of this setting per account. Since this setting has + a default value, this setting is present on all accounts even though it's + never set on a given account. Deletion reverts the value of the setting back + to the default value.`, Annotations: map[string]string{ "package": "settings", }, @@ -24,30 +32,23 @@ var Cmd = &cobra.Command{ // start delete-personal-compute-setting command var deletePersonalComputeSettingReq settings.DeletePersonalComputeSettingRequest -var deletePersonalComputeSettingJson flags.JsonFlag func init() { Cmd.AddCommand(deletePersonalComputeSettingCmd) // TODO: short flags - deletePersonalComputeSettingCmd.Flags().Var(&deletePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - - deletePersonalComputeSettingCmd.Flags().StringVar(&deletePersonalComputeSettingReq.Etag, "etag", deletePersonalComputeSettingReq.Etag, `TBD.`) } var deletePersonalComputeSettingCmd = &cobra.Command{ - Use: "delete-personal-compute-setting", + Use: "delete-personal-compute-setting ETAG", Short: `Delete Personal Compute setting.`, Long: `Delete Personal Compute setting. 
- TBD`, + Reverts back the Personal Compute setting value to default (ON)`, Annotations: map[string]string{}, Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } + check := cobra.ExactArgs(1) return check(cmd, args) }, PreRunE: root.MustAccountClient, @@ -55,13 +56,7 @@ var deletePersonalComputeSettingCmd = &cobra.Command{ ctx := cmd.Context() a := root.AccountClient(ctx) - if cmd.Flags().Changed("json") { - err = deletePersonalComputeSettingJson.Unmarshal(&deletePersonalComputeSettingReq) - if err != nil { - return err - } - } else { - } + deletePersonalComputeSettingReq.Etag = args[0] response, err := a.Settings.DeletePersonalComputeSetting(ctx, deletePersonalComputeSettingReq) if err != nil { @@ -76,30 +71,23 @@ var deletePersonalComputeSettingCmd = &cobra.Command{ // start read-personal-compute-setting command var readPersonalComputeSettingReq settings.ReadPersonalComputeSettingRequest -var readPersonalComputeSettingJson flags.JsonFlag func init() { Cmd.AddCommand(readPersonalComputeSettingCmd) // TODO: short flags - readPersonalComputeSettingCmd.Flags().Var(&readPersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - - readPersonalComputeSettingCmd.Flags().StringVar(&readPersonalComputeSettingReq.Etag, "etag", readPersonalComputeSettingReq.Etag, `TBD.`) } var readPersonalComputeSettingCmd = &cobra.Command{ - Use: "read-personal-compute-setting", + Use: "read-personal-compute-setting ETAG", Short: `Get Personal Compute setting.`, Long: `Get Personal Compute setting. 
- TBD`, + Gets the value of the Personal Compute setting.`, Annotations: map[string]string{}, Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } + check := cobra.ExactArgs(1) return check(cmd, args) }, PreRunE: root.MustAccountClient, @@ -107,13 +95,7 @@ var readPersonalComputeSettingCmd = &cobra.Command{ ctx := cmd.Context() a := root.AccountClient(ctx) - if cmd.Flags().Changed("json") { - err = readPersonalComputeSettingJson.Unmarshal(&readPersonalComputeSettingReq) - if err != nil { - return err - } - } else { - } + readPersonalComputeSettingReq.Etag = args[0] response, err := a.Settings.ReadPersonalComputeSetting(ctx, readPersonalComputeSettingReq) if err != nil { @@ -135,7 +117,7 @@ func init() { // TODO: short flags updatePersonalComputeSettingCmd.Flags().Var(&updatePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updatePersonalComputeSettingCmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `TBD.`) + updatePersonalComputeSettingCmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `This should always be set to true for Settings RPCs.`) // TODO: complex arg: setting } @@ -145,7 +127,7 @@ var updatePersonalComputeSettingCmd = &cobra.Command{ Short: `Update Personal Compute setting.`, Long: `Update Personal Compute setting. 
- TBD`, + Updates the value of the Personal Compute setting.`, Annotations: map[string]string{}, Args: func(cmd *cobra.Command, args []string) error { diff --git a/cmd/account/storage-credentials/storage-credentials.go b/cmd/account/storage-credentials/storage-credentials.go index 73e328dc5..f5dd58200 100755 --- a/cmd/account/storage-credentials/storage-credentials.go +++ b/cmd/account/storage-credentials/storage-credentials.go @@ -81,6 +81,8 @@ func init() { Cmd.AddCommand(deleteCmd) // TODO: short flags + deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the Storage Credential is not empty.`) + } var deleteCmd = &cobra.Command{ diff --git a/cmd/account/users/users.go b/cmd/account/users/users.go index 8a0b40b83..3c3edd0f5 100755 --- a/cmd/account/users/users.go +++ b/cmd/account/users/users.go @@ -265,7 +265,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } diff --git a/cmd/workspace/alerts/alerts.go b/cmd/workspace/alerts/alerts.go index 91417c63f..e13f72849 100755 --- a/cmd/workspace/alerts/alerts.go +++ b/cmd/workspace/alerts/alerts.go @@ -34,7 +34,7 @@ func init() { // TODO: short flags createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the alert.`) + createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) createCmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) } diff --git a/cmd/workspace/clean-rooms/clean-rooms.go 
b/cmd/workspace/clean-rooms/clean-rooms.go new file mode 100755 index 000000000..328ed3e73 --- /dev/null +++ b/cmd/workspace/clean-rooms/clean-rooms.go @@ -0,0 +1,258 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package clean_rooms + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sharing" + "github.com/spf13/cobra" +) + +var Cmd = &cobra.Command{ + Use: "clean-rooms", + Short: `A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases.`, + Long: `A clean room is a secure, privacy-protecting environment where two or more + parties can share sensitive enterprise data, including customer data, for + measurements, insights, activation and other use cases. + + To create clean rooms, you must be a metastore admin or a user with the + **CREATE_CLEAN_ROOM** privilege.`, + Annotations: map[string]string{ + "package": "sharing", + }, + + // This service is being previewed; hide from help output. + Hidden: true, +} + +// start create command +var createReq sharing.CreateCleanRoom +var createJson flags.JsonFlag + +func init() { + Cmd.AddCommand(createCmd) + // TODO: short flags + createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + +} + +var createCmd = &cobra.Command{ + Use: "create", + Short: `Create a clean room.`, + Long: `Create a clean room. + + Creates a new clean room with specified colaborators. 
The caller must be a + metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.`, + + Annotations: map[string]string{}, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.CleanRooms.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + ValidArgsFunction: cobra.NoFileCompletions, +} + +// start delete command +var deleteReq sharing.DeleteCleanRoomRequest + +func init() { + Cmd.AddCommand(deleteCmd) + // TODO: short flags + +} + +var deleteCmd = &cobra.Command{ + Use: "delete NAME_ARG", + Short: `Delete a clean room.`, + Long: `Delete a clean room. + + Deletes a data object clean room from the metastore. The caller must be an + owner of the clean room.`, + + Annotations: map[string]string{}, + Args: func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + }, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.NameArg = args[0] + + err = w.CleanRooms.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ ValidArgsFunction: cobra.NoFileCompletions, +} + +// start get command +var getReq sharing.GetCleanRoomRequest + +func init() { + Cmd.AddCommand(getCmd) + // TODO: short flags + + getCmd.Flags().BoolVar(&getReq.IncludeRemoteDetails, "include-remote-details", getReq.IncludeRemoteDetails, `Whether to include remote details (central) on the clean room.`) + +} + +var getCmd = &cobra.Command{ + Use: "get NAME_ARG", + Short: `Get a clean room.`, + Long: `Get a clean room. + + Gets a data object clean room from the metastore. The caller must be a + metastore admin or the owner of the clean room.`, + + Annotations: map[string]string{}, + Args: func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + }, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.NameArg = args[0] + + response, err := w.CleanRooms.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + ValidArgsFunction: cobra.NoFileCompletions, +} + +// start list command + +func init() { + Cmd.AddCommand(listCmd) + +} + +var listCmd = &cobra.Command{ + Use: "list", + Short: `List clean rooms.`, + Long: `List clean rooms. + + Gets an array of data object clean rooms from the metastore. The caller must + be a metastore admin or the owner of the clean room. 
There is no guarantee of + a specific ordering of the elements in the array.`, + + Annotations: map[string]string{}, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.CleanRooms.ListAll(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + ValidArgsFunction: cobra.NoFileCompletions, +} + +// start update command +var updateReq sharing.UpdateCleanRoom +var updateJson flags.JsonFlag + +func init() { + Cmd.AddCommand(updateCmd) + // TODO: short flags + updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: catalog_updates + updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the clean room.`) + updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of clean room.`) + +} + +var updateCmd = &cobra.Command{ + Use: "update NAME_ARG", + Short: `Update a clean room.`, + Long: `Update a clean room. + + Updates the clean room with the changes and data objects in the request. The + caller must be the owner of the clean room or a metastore admin. + + When the caller is a metastore admin, only the __owner__ field can be updated. + + In the case that the clean room name is changed **updateCleanRoom** requires + that the caller is both the clean room owner and a metastore admin. + + For each table that is added through this method, the clean room owner must + also have **SELECT** privilege on the table. The privilege must be maintained + indefinitely for recipients to be able to access the table. 
Typically, you + should use a group as the clean room owner. + + Table removals through **update** do not require additional privileges.`, + + Annotations: map[string]string{}, + Args: func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + }, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } + updateReq.NameArg = args[0] + + response, err := w.CleanRooms.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + ValidArgsFunction: cobra.NoFileCompletions, +} + +// end service CleanRooms diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 68ce3ef06..596dba829 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -7,6 +7,7 @@ import ( alerts "github.com/databricks/cli/cmd/workspace/alerts" catalogs "github.com/databricks/cli/cmd/workspace/catalogs" + clean_rooms "github.com/databricks/cli/cmd/workspace/clean-rooms" cluster_policies "github.com/databricks/cli/cmd/workspace/cluster-policies" clusters "github.com/databricks/cli/cmd/workspace/clusters" connections "github.com/databricks/cli/cmd/workspace/connections" @@ -58,6 +59,7 @@ import ( func init() { root.RootCmd.AddCommand(alerts.Cmd) root.RootCmd.AddCommand(catalogs.Cmd) + root.RootCmd.AddCommand(clean_rooms.Cmd) root.RootCmd.AddCommand(cluster_policies.Cmd) root.RootCmd.AddCommand(clusters.Cmd) root.RootCmd.AddCommand(connections.Cmd) @@ -108,6 +110,7 @@ func init() { // Register commands with groups alerts.Cmd.GroupID = "sql" catalogs.Cmd.GroupID = "catalog" + clean_rooms.Cmd.GroupID = "sharing" cluster_policies.Cmd.GroupID = "compute" clusters.Cmd.GroupID = 
"compute" connections.Cmd.GroupID = "catalog" diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index b18ddff8c..014be02f8 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -35,11 +35,6 @@ func init() { // TODO: short flags createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.IsFavorite, "is-favorite", createReq.IsFavorite, `Indicates whether this query object should appear in the current user's favorites list.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The title of this dashboard that appears in list views and at the top of the dashboard page.`) - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the dashboard.`) - // TODO: array: tags - } var createCmd = &cobra.Command{ @@ -66,6 +61,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Dashboards.Create(ctx, createReq) diff --git a/cmd/workspace/experiments/experiments.go b/cmd/workspace/experiments/experiments.go index 738c0240d..a95da2f5c 100755 --- a/cmd/workspace/experiments/experiments.go +++ b/cmd/workspace/experiments/experiments.go @@ -645,7 +645,7 @@ var logBatchCmd = &cobra.Command{ The following limits also apply to metric, param, and tag keys and values: - * Metric keyes, param keys, and tag keys can be up to 250 characters in length + * Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter and tag values can be up to 250 characters in length`, Annotations: map[string]string{}, diff --git a/cmd/workspace/groups/groups.go b/cmd/workspace/groups/groups.go index bdadd1125..39a95aada 100755 --- a/cmd/workspace/groups/groups.go +++ 
b/cmd/workspace/groups/groups.go @@ -258,7 +258,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } diff --git a/cmd/workspace/instance-profiles/instance-profiles.go b/cmd/workspace/instance-profiles/instance-profiles.go index ee2d92808..17eea267c 100755 --- a/cmd/workspace/instance-profiles/instance-profiles.go +++ b/cmd/workspace/instance-profiles/instance-profiles.go @@ -34,7 +34,7 @@ func init() { addCmd.Flags().Var(&addJson, "json", `either inline JSON string or @path/to/file.json with request body`) addCmd.Flags().StringVar(&addReq.IamRoleArn, "iam-role-arn", addReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - addCmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) + addCmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) addCmd.Flags().BoolVar(&addReq.SkipValidation, "skip-validation", addReq.SkipValidation, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) } @@ -90,7 +90,7 @@ func init() { editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) editCmd.Flags().StringVar(&editReq.IamRoleArn, "iam-role-arn", editReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - editCmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `By default, Databricks validates that it has sufficient permissions 
to launch instances with the instance profile.`) + editCmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) } diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index 1dfb065b1..41101bdbf 100755 --- a/cmd/workspace/jobs/jobs.go +++ b/cmd/workspace/jobs/jobs.go @@ -203,25 +203,6 @@ func init() { // TODO: short flags createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: access_control_list - // TODO: array: compute - // TODO: complex arg: continuous - // TODO: complex arg: email_notifications - createCmd.Flags().Var(&createReq.Format, "format", `Used to tell what is the format of the job.`) - // TODO: complex arg: git_source - // TODO: array: job_clusters - createCmd.Flags().IntVar(&createReq.MaxConcurrentRuns, "max-concurrent-runs", createReq.MaxConcurrentRuns, `An optional maximum allowed number of concurrent runs of the job.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `An optional name for the job.`) - // TODO: complex arg: notification_settings - // TODO: array: parameters - // TODO: complex arg: run_as - // TODO: complex arg: schedule - // TODO: map via StringToStringVar: tags - // TODO: array: tasks - createCmd.Flags().IntVar(&createReq.TimeoutSeconds, "timeout-seconds", createReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) - // TODO: complex arg: trigger - // TODO: complex arg: webhook_notifications - } var createCmd = &cobra.Command{ @@ -250,6 +231,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Jobs.Create(ctx, createReq) @@ -1012,7 +994,9 @@ func init() { submitCmd.Flags().Var(&submitJson, "json", `either inline 
JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list + // TODO: complex arg: email_notifications // TODO: complex arg: git_source + // TODO: complex arg: health submitCmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`) // TODO: complex arg: notification_settings submitCmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) diff --git a/cmd/workspace/metastores/metastores.go b/cmd/workspace/metastores/metastores.go index 1f27e0b8b..9db023a77 100755 --- a/cmd/workspace/metastores/metastores.go +++ b/cmd/workspace/metastores/metastores.go @@ -20,10 +20,10 @@ var Cmd = &cobra.Command{ Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use Unity Catalog, it must have a Unity Catalog metastore attached. - + Each metastore is configured with a root storage location in a cloud storage account. This storage location is used for metadata and managed tables data. - + NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a @@ -46,7 +46,7 @@ var assignCmd = &cobra.Command{ Use: "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID", Short: `Create an assignment.`, Long: `Create an assignment. - + Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account admin.`, @@ -96,7 +96,7 @@ var createCmd = &cobra.Command{ Use: "create NAME STORAGE_ROOT", Short: `Create a metastore.`, Long: `Create a metastore. 
- + Creates a new metastore based on a provided name and storage root path.`, Annotations: map[string]string{}, @@ -144,7 +144,7 @@ var currentCmd = &cobra.Command{ Use: "current", Short: `Get metastore assignment for workspace.`, Long: `Get metastore assignment for workspace. - + Gets the metastore assignment for the workspace being accessed.`, Annotations: map[string]string{}, @@ -178,7 +178,7 @@ var deleteCmd = &cobra.Command{ Use: "delete ID", Short: `Delete a metastore.`, Long: `Delete a metastore. - + Deletes a metastore. The caller must be a metastore admin.`, Annotations: map[string]string{}, @@ -217,6 +217,64 @@ var deleteCmd = &cobra.Command{ ValidArgsFunction: cobra.NoFileCompletions, } +// start enable-optimization command +var enableOptimizationReq catalog.UpdatePredictiveOptimization +var enableOptimizationJson flags.JsonFlag + +func init() { + Cmd.AddCommand(enableOptimizationCmd) + // TODO: short flags + enableOptimizationCmd.Flags().Var(&enableOptimizationJson, "json", `either inline JSON string or @path/to/file.json with request body`) + +} + +var enableOptimizationCmd = &cobra.Command{ + Use: "enable-optimization METASTORE_ID ENABLE", + Short: `Toggle predictive optimization on the metastore.`, + Long: `Toggle predictive optimization on the metastore. + + Enables or disables predictive optimization on the metastore.`, + + // This command is being previewed; hide from help output. 
+ Hidden: true, + + Annotations: map[string]string{}, + Args: func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + }, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = enableOptimizationJson.Unmarshal(&enableOptimizationReq) + if err != nil { + return err + } + } else { + enableOptimizationReq.MetastoreId = args[0] + _, err = fmt.Sscan(args[1], &enableOptimizationReq.Enable) + if err != nil { + return fmt.Errorf("invalid ENABLE: %s", args[1]) + } + } + + response, err := w.Metastores.EnableOptimization(ctx, enableOptimizationReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + ValidArgsFunction: cobra.NoFileCompletions, +} + // start get command var getReq catalog.GetMetastoreRequest @@ -230,7 +288,7 @@ var getCmd = &cobra.Command{ Use: "get ID", Short: `Get a metastore.`, Long: `Get a metastore. - + Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this info.`, @@ -281,7 +339,7 @@ var listCmd = &cobra.Command{ Use: "list", Short: `List metastores.`, Long: `List metastores. - + Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. 
There is no guarantee of a specific ordering of the elements in the array.`, @@ -302,64 +360,6 @@ var listCmd = &cobra.Command{ ValidArgsFunction: cobra.NoFileCompletions, } -// start maintenance command -var maintenanceReq catalog.UpdatePredictiveOptimization -var maintenanceJson flags.JsonFlag - -func init() { - Cmd.AddCommand(maintenanceCmd) - // TODO: short flags - maintenanceCmd.Flags().Var(&maintenanceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - -} - -var maintenanceCmd = &cobra.Command{ - Use: "maintenance METASTORE_ID ENABLE", - Short: `Enables or disables auto maintenance on the metastore.`, - Long: `Enables or disables auto maintenance on the metastore. - - Enables or disables auto maintenance on the metastore.`, - - // This command is being previewed; hide from help output. - Hidden: true, - - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(2) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { - ctx := cmd.Context() - w := root.WorkspaceClient(ctx) - - if cmd.Flags().Changed("json") { - err = maintenanceJson.Unmarshal(&maintenanceReq) - if err != nil { - return err - } - } else { - maintenanceReq.MetastoreId = args[0] - _, err = fmt.Sscan(args[1], &maintenanceReq.Enable) - if err != nil { - return fmt.Errorf("invalid ENABLE: %s", args[1]) - } - } - - response, err := w.Metastores.EnableOptimization(ctx, maintenanceReq) - if err != nil { - return err - } - return cmdio.Render(ctx, response) - }, - // Disable completions since they are not applicable. - // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, -} - // start summary command func init() { @@ -371,7 +371,7 @@ var summaryCmd = &cobra.Command{ Use: "summary", Short: `Get a metastore summary.`, Long: `Get a metastore summary. - + Gets information about a metastore. This summary includes the storage credential, the cloud vendor, the cloud region, and the global metastore ID.`, @@ -404,7 +404,7 @@ var unassignCmd = &cobra.Command{ Use: "unassign WORKSPACE_ID METASTORE_ID", Short: `Delete an assignment.`, Long: `Delete an assignment. - + Deletes a metastore assignment. The caller must be an account administrator.`, Annotations: map[string]string{}, @@ -455,7 +455,7 @@ var updateCmd = &cobra.Command{ Use: "update ID", Short: `Update a metastore.`, Long: `Update a metastore. - + Updates information for a specific metastore. The caller must be a metastore admin.`, @@ -511,7 +511,7 @@ var updateAssignmentCmd = &cobra.Command{ Use: "update-assignment WORKSPACE_ID", Short: `Update an assignment.`, Long: `Update an assignment. - + Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. 
The caller must be an account admin diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 8d6ffff14..10f37846d 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -44,25 +44,6 @@ func init() { // TODO: short flags createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.AllowDuplicateNames, "allow-duplicate-names", createReq.AllowDuplicateNames, `If false, deployment will fail if name conflicts with that of another pipeline.`) - createCmd.Flags().StringVar(&createReq.Catalog, "catalog", createReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) - createCmd.Flags().StringVar(&createReq.Channel, "channel", createReq.Channel, `DLT Release Channel that specifies which version to use.`) - // TODO: array: clusters - // TODO: map via StringToStringVar: configuration - createCmd.Flags().BoolVar(&createReq.Continuous, "continuous", createReq.Continuous, `Whether the pipeline is continuous or triggered.`) - createCmd.Flags().BoolVar(&createReq.Development, "development", createReq.Development, `Whether the pipeline is in Development mode.`) - createCmd.Flags().BoolVar(&createReq.DryRun, "dry-run", createReq.DryRun, ``) - createCmd.Flags().StringVar(&createReq.Edition, "edition", createReq.Edition, `Pipeline product edition.`) - // TODO: complex arg: filters - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Unique identifier for this pipeline.`) - // TODO: array: libraries - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Friendly identifier for this pipeline.`) - createCmd.Flags().BoolVar(&createReq.Photon, "photon", createReq.Photon, `Whether Photon is enabled for this pipeline.`) - createCmd.Flags().BoolVar(&createReq.Serverless, "serverless", createReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) - 
createCmd.Flags().StringVar(&createReq.Storage, "storage", createReq.Storage, `DBFS root directory for storing checkpoints and tables.`) - createCmd.Flags().StringVar(&createReq.Target, "target", createReq.Target, `Target schema (database) to add tables in this pipeline to.`) - // TODO: complex arg: trigger - } var createCmd = &cobra.Command{ @@ -92,6 +73,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Pipelines.Create(ctx, createReq) diff --git a/cmd/workspace/policy-families/policy-families.go b/cmd/workspace/policy-families/policy-families.go index 37b45c8e1..8954afa1d 100755 --- a/cmd/workspace/policy-families/policy-families.go +++ b/cmd/workspace/policy-families/policy-families.go @@ -37,7 +37,11 @@ func init() { } var getCmd = &cobra.Command{ - Use: "get POLICY_FAMILY_ID", + Use: "get POLICY_FAMILY_ID", + Short: `Get policy family information.`, + Long: `Get policy family information. + + Retrieve the information for a policy family based on its identifier.`, Annotations: map[string]string{}, Args: func(cmd *cobra.Command, args []string) error { @@ -77,7 +81,11 @@ func init() { } var listCmd = &cobra.Command{ - Use: "list", + Use: "list", + Short: `List policy families.`, + Long: `List policy families. + + Retrieve a list of policy families.
This API is paginated.`, Annotations: map[string]string{}, Args: func(cmd *cobra.Command, args []string) error { diff --git a/cmd/workspace/queries/queries.go b/cmd/workspace/queries/queries.go index 3f7e8ca31..8cf352783 100755 --- a/cmd/workspace/queries/queries.go +++ b/cmd/workspace/queries/queries.go @@ -33,13 +33,6 @@ func init() { // TODO: short flags createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DataSourceId, "data-source-id", createReq.DataSourceId, `The ID of the data source / SQL warehouse where this query will run.`) - createCmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `General description that can convey additional information about this query such as usage notes.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `The name or title of this query to display in list views.`) - // TODO: any: options - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the query.`) - createCmd.Flags().StringVar(&createReq.Query, "query", createReq.Query, `The text of the query.`) - } var createCmd = &cobra.Command{ @@ -76,6 +69,7 @@ var createCmd = &cobra.Command{ return err } } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") } response, err := w.Queries.Create(ctx, createReq) @@ -314,11 +308,11 @@ func init() { // TODO: short flags updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `The ID of the data source / SQL warehouse where this query will run.`) - updateCmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that can convey additional information about this query such 
as usage notes.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name or title of this query to display in list views.`) + updateCmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID.`) + updateCmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`) + updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`) // TODO: any: options - updateCmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query.`) + updateCmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query to be run.`) } diff --git a/cmd/workspace/service-principals/service-principals.go b/cmd/workspace/service-principals/service-principals.go index 7dfc390f2..4bb75d2b4 100755 --- a/cmd/workspace/service-principals/service-principals.go +++ b/cmd/workspace/service-principals/service-principals.go @@ -257,7 +257,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } diff --git a/cmd/workspace/tables/tables.go b/cmd/workspace/tables/tables.go index 9ff653b99..d57b72f1b 100755 --- a/cmd/workspace/tables/tables.go +++ b/cmd/workspace/tables/tables.go @@ -258,4 +258,65 @@ var listSummariesCmd = &cobra.Command{ ValidArgsFunction: cobra.NoFileCompletions, } +// start update command +var updateReq catalog.UpdateTableRequest + +func init() { + Cmd.AddCommand(updateCmd) + // TODO: short flags + + updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, ``) + +} + +var updateCmd = &cobra.Command{ + Use: "update FULL_NAME", + Short: `Update a table owner.`, + 
Long: `Update a table owner. + + Change the owner of the table. The caller must be the owner of the parent + catalog, have the **USE_CATALOG** privilege on the parent catalog and be the + owner of the parent schema, or be the owner of the table and have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema.`, + + // This command is being previewed; hide from help output. + Hidden: true, + + Annotations: map[string]string{}, + PreRunE: root.MustWorkspaceClient, + RunE: func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." + names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the table") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the table") + } + updateReq.FullName = args[0] + + err = w.Tables.Update(ctx, updateReq) + if err != nil { + return err + } + return nil + }, + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ ValidArgsFunction: cobra.NoFileCompletions, +} + // end service Tables diff --git a/cmd/workspace/users/users.go b/cmd/workspace/users/users.go index 570a8f79c..71fdcf9ed 100755 --- a/cmd/workspace/users/users.go +++ b/cmd/workspace/users/users.go @@ -265,7 +265,8 @@ func init() { // TODO: short flags patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) - // TODO: array: operations + // TODO: array: Operations + // TODO: array: schema } From adab9aa5d75979193ae218edcd17d8a9e3e097f6 Mon Sep 17 00:00:00 2001 From: Fabian Jakobs Date: Wed, 19 Jul 2023 14:06:58 +0200 Subject: [PATCH 006/139] Add support for more SDK config options (#587) ## Changes Add support for more SDK config options --- bundle/config/workspace.go | 18 ++++++++++++++---- bundle/schema/docs/bundle_descriptions.json | 18 +++++++++++++----- 2 files changed, 27 insertions(+), 9 deletions(-) diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index 1b6dc4cd5..ee09bb8b4 100644 --- a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -21,8 +21,13 @@ type Workspace struct { // // Generic attributes. - Host string `json:"host,omitempty"` - Profile string `json:"profile,omitempty"` + Host string `json:"host,omitempty"` + Profile string `json:"profile,omitempty"` + AuthType string `json:"auth_type,omitempty"` + MetadataServiceURL string `json:"metadata_service_url,omitempty"` + + // OAuth specific attributes. + ClientID string `json:"client_id,omitempty"` // Google specific attributes. 
GoogleServiceAccount string `json:"google_service_account,omitempty"` @@ -60,8 +65,13 @@ type Workspace struct { func (w *Workspace) Client() (*databricks.WorkspaceClient, error) { cfg := databricks.Config{ // Generic - Host: w.Host, - Profile: w.Profile, + Host: w.Host, + Profile: w.Profile, + AuthType: w.AuthType, + MetadataServiceURL: w.MetadataServiceURL, + + // OAuth + ClientID: w.ClientID, // Google GoogleServiceAccount: w.GoogleServiceAccount, diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 7734614ec..6c5850c12 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -1788,8 +1788,11 @@ "artifact_path": { "description": "The remote path to synchronize build artifacts to. This defaults to `${workspace.root}/artifacts`" }, + "auth_type": { + "description": "When multiple auth attributes are available in the environment, use the auth type specified by this argument" + }, "azure_client_id": { - "description": "" + "description": "Azure Client ID." }, "azure_environment": { "description": "Azure environment, one of (Public, UsGov, China, Germany)." }, @@ -1798,23 +1801,28 @@ "description": "Azure Login Application ID." }, "azure_tenant_id": { - "description": "" + "description": "Azure Tenant ID." }, "azure_use_msi": { - "description": "" + "description": "Whether to use Managed Service Identity (MSI) to authenticate to Azure Databricks." }, "azure_workspace_resource_id": { "description": "Azure Resource Manager ID for Azure Databricks workspace." }, + "client_id": { + "description": "OAuth client ID for the Databricks workspace." + }, "file_path": { "description": "The remote path to synchronize local files artifacts to. This defaults to `${workspace.root}/files`" }, "google_service_account": { - "description": "" }, "host": { "description": "Host url of the workspace."
}, + "metadata_service_url": { + "description": "The URL of the metadata service to use for authentication." + }, "profile": { "description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg." }, @@ -3591,4 +3599,4 @@ } } } -} \ No newline at end of file +} From 02dbac7b8a0697c056be3d666a526047ae480e73 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 21 Jul 2023 10:59:02 +0200 Subject: [PATCH 007/139] Add template renderer for Databricks templates (#589) ## Changes This PR adds the renderer struct, which is a walker that traverses templates and generates projects from them ## Tests Unit tests --- libs/template/helpers.go | 20 + libs/template/renderer.go | 316 +++++++++++++ libs/template/renderer_test.go | 438 ++++++++++++++++++ .../testdata/email/library/email.tmpl | 1 + .../template/testdata/email/template/my_email | 1 + .../executable-bit-read/template/not-a-script | 0 .../executable-bit-read/template/script.sh | 1 + libs/template/testdata/fail/template/hello | 1 + .../skip-all-files-in-cwd/template/file1 | 1 + .../skip-all-files-in-cwd/template/file2 | 1 + .../skip-all-files-in-cwd/template/file3 | 3 + .../skip-dir-eagerly/template/dir1/file1 | 1 + .../testdata/skip-dir-eagerly/template/file2 | 3 + .../skip-is-relative/template/dir1/dir2/file3 | 1 + .../skip-is-relative/template/dir1/file2 | 1 + .../testdata/skip-is-relative/template/file1 | 1 + .../testdata/skip/template/dir1/file4 | 0 .../testdata/skip/template/dir1/file5 | 0 .../testdata/skip/template/dir2/file6 | 0 libs/template/testdata/skip/template/file1 | 1 + libs/template/testdata/skip/template/file2 | 2 + libs/template/testdata/skip/template/file3 | 1 + .../testdata/walk/template/dir1/dir3/file3 | 1 + .../testdata/walk/template/dir2/file4 | 5 + libs/template/testdata/walk/template/file1 | 1 + libs/template/testdata/walk/template/file2 | 1 + 26 files changed, 802 insertions(+) create mode 100644 
libs/template/helpers.go create mode 100644 libs/template/renderer.go create mode 100644 libs/template/renderer_test.go create mode 100644 libs/template/testdata/email/library/email.tmpl create mode 100644 libs/template/testdata/email/template/my_email create mode 100644 libs/template/testdata/executable-bit-read/template/not-a-script create mode 100755 libs/template/testdata/executable-bit-read/template/script.sh create mode 100644 libs/template/testdata/fail/template/hello create mode 100644 libs/template/testdata/skip-all-files-in-cwd/template/file1 create mode 100644 libs/template/testdata/skip-all-files-in-cwd/template/file2 create mode 100644 libs/template/testdata/skip-all-files-in-cwd/template/file3 create mode 100644 libs/template/testdata/skip-dir-eagerly/template/dir1/file1 create mode 100644 libs/template/testdata/skip-dir-eagerly/template/file2 create mode 100644 libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 create mode 100644 libs/template/testdata/skip-is-relative/template/dir1/file2 create mode 100644 libs/template/testdata/skip-is-relative/template/file1 create mode 100644 libs/template/testdata/skip/template/dir1/file4 create mode 100644 libs/template/testdata/skip/template/dir1/file5 create mode 100644 libs/template/testdata/skip/template/dir2/file6 create mode 100644 libs/template/testdata/skip/template/file1 create mode 100644 libs/template/testdata/skip/template/file2 create mode 100644 libs/template/testdata/skip/template/file3 create mode 100644 libs/template/testdata/walk/template/dir1/dir3/file3 create mode 100644 libs/template/testdata/walk/template/dir2/file4 create mode 100644 libs/template/testdata/walk/template/file1 create mode 100644 libs/template/testdata/walk/template/file2 diff --git a/libs/template/helpers.go b/libs/template/helpers.go new file mode 100644 index 000000000..271fd539b --- /dev/null +++ b/libs/template/helpers.go @@ -0,0 +1,20 @@ +package template + +import ( + "fmt" + "text/template" +) + +type 
ErrFail struct { + msg string +} + +func (err ErrFail) Error() string { + return err.msg +} + +var helperFuncs = template.FuncMap{ + "fail": func(format string, args ...any) (any, error) { + return nil, ErrFail{fmt.Sprintf(format, args...)} + }, +} diff --git a/libs/template/renderer.go b/libs/template/renderer.go new file mode 100644 index 000000000..853e3505b --- /dev/null +++ b/libs/template/renderer.go @@ -0,0 +1,316 @@ +package template + +import ( + "context" + "errors" + "fmt" + "io" + "io/fs" + "os" + "path" + "path/filepath" + "strings" + "text/template" + + "github.com/databricks/cli/libs/filer" + "github.com/databricks/cli/libs/log" + "github.com/databricks/databricks-sdk-go/logger" + "golang.org/x/exp/slices" +) + +type inMemoryFile struct { + // Root path for the project instance. This path uses the system's default + // file separator. For example /foo/bar on Unix and C:\foo\bar on windows + root string + + // Unix like relPath for the file (using '/' as the separator). This path + // is relative to the root. Using unix like relative paths enables skip patterns + // to work across both windows and unix based operating systems. + relPath string + content []byte + perm fs.FileMode +} + +func (f *inMemoryFile) fullPath() string { + return filepath.Join(f.root, filepath.FromSlash(f.relPath)) +} + +func (f *inMemoryFile) persistToDisk() error { + path := f.fullPath() + + err := os.MkdirAll(filepath.Dir(path), 0755) + if err != nil { + return err + } + return os.WriteFile(path, f.content, f.perm) +} + +// Renders a databricks template as a project +type renderer struct { + ctx context.Context + + // A config that is the "dot" value available to any template being rendered. + // Refer to https://pkg.go.dev/text/template for how templates can use + // this "dot" value + config map[string]any + + // A base template with helper functions and user defined templates in the + // library directory loaded. 
This is cloned for each project template computation + // during file tree walk + baseTemplate *template.Template + + // List of in memory files generated from template + files []*inMemoryFile + + // Glob patterns for files and directories to skip. There are three possible + // outcomes for skip: + // + // 1. File is not generated. This happens if one of the file's parent directories + // match a glob pattern + // + // 2. File is generated but not persisted to disk. This happens if the file itself + // matches a glob pattern, but none of it's parents match a glob pattern from the list + // + // 3. File is persisted to disk. This happens if the file and it's parent directories + // do not match any glob patterns from this list + skipPatterns []string + + // Filer rooted at template root. The file tree from this root is walked to + // generate the project + templateFiler filer.Filer + + // Root directory for the project instantiated from the template + instanceRoot string +} + +func newRenderer(ctx context.Context, config map[string]any, templateRoot, libraryRoot, instanceRoot string) (*renderer, error) { + // Initialize new template, with helper functions loaded + tmpl := template.New("").Funcs(helperFuncs) + + // Load user defined associated templates from the library root + libraryGlob := filepath.Join(libraryRoot, "*") + matches, err := filepath.Glob(libraryGlob) + if err != nil { + return nil, err + } + if len(matches) != 0 { + tmpl, err = tmpl.ParseFiles(matches...) + if err != nil { + return nil, err + } + } + + templateFiler, err := filer.NewLocalClient(templateRoot) + if err != nil { + return nil, err + } + + ctx = log.NewContext(ctx, log.GetLogger(ctx).With("action", "initialize-template")) + + return &renderer{ + ctx: ctx, + config: config, + baseTemplate: tmpl, + files: make([]*inMemoryFile, 0), + skipPatterns: make([]string, 0), + templateFiler: templateFiler, + instanceRoot: instanceRoot, + }, nil +} + +// Executes the template by applying config on it. 
Returns the materialized template +// as a string +func (r *renderer) executeTemplate(templateDefinition string) (string, error) { + // Create copy of base template so as to not overwrite it + tmpl, err := r.baseTemplate.Clone() + if err != nil { + return "", err + } + + // Parse the template text + tmpl, err = tmpl.Parse(templateDefinition) + if err != nil { + return "", err + } + + // Execute template and get result + result := strings.Builder{} + err = tmpl.Execute(&result, r.config) + if err != nil { + return "", err + } + return result.String(), nil +} + +func (r *renderer) computeFile(relPathTemplate string) (*inMemoryFile, error) { + // read template file contents + templateReader, err := r.templateFiler.Read(r.ctx, relPathTemplate) + if err != nil { + return nil, err + } + contentTemplate, err := io.ReadAll(templateReader) + if err != nil { + return nil, err + } + + // read file permissions + info, err := r.templateFiler.Stat(r.ctx, relPathTemplate) + if err != nil { + return nil, err + } + perm := info.Mode().Perm() + + // execute the contents of the file as a template + content, err := r.executeTemplate(string(contentTemplate)) + // Capture errors caused by the "fail" helper function + if target := (&ErrFail{}); errors.As(err, target) { + return nil, target + } + if err != nil { + return nil, fmt.Errorf("failed to compute file content for %s. %w", relPathTemplate, err) + } + + // Execute relative path template to get materialized path for the file + relPath, err := r.executeTemplate(relPathTemplate) + if err != nil { + return nil, err + } + + return &inMemoryFile{ + root: r.instanceRoot, + relPath: relPath, + content: []byte(content), + perm: perm, + }, nil +} + + +// This function walks the template file tree to generate an in memory representation +// of a project. 
+// +// During file tree walk, in the current directory, we would like to determine +// all possible {{skip}} function calls before we process any of the directories +// so that we can skip them eagerly if needed. That is in the current working directory +// we would like to process all files before we process any of the directories. +// +// This is not possible using the std library WalkDir which processes the files in +// lexical order which is why this function implements BFS. +func (r *renderer) walk() error { + directories := []string{"."} + var currentDirectory string + + for len(directories) > 0 { + currentDirectory, directories = directories[0], directories[1:] + + // Skip current directory if it matches any of accumulated skip patterns + instanceDirectory, err := r.executeTemplate(currentDirectory) + if err != nil { + return err + } + isSkipped, err := r.isSkipped(instanceDirectory) + if err != nil { + return err + } + if isSkipped { + logger.Infof(r.ctx, "skipping directory: %s", instanceDirectory) + continue + } + + // Add skip function, which accumulates skip patterns relative to current + // directory + r.baseTemplate.Funcs(template.FuncMap{ + "skip": func(relPattern string) string { + // patterns are specified relative to current directory of the file + // the {{skip}} function is called from. + pattern := path.Join(currentDirectory, relPattern) + if !slices.Contains(r.skipPatterns, pattern) { + logger.Infof(r.ctx, "adding skip pattern: %s", pattern) + r.skipPatterns = append(r.skipPatterns, pattern) + } + // return empty string will print nothing at function call site + // when executing the template + return "" + }, + }) + + // Process all entries in current directory + // + // 1. For files: the templates in the file name and content are executed, and + // a in memory representation of the file is generated + // + // 2. 
For directories: They are appended to a slice, which acts as a queue + // allowing BFS traversal of the template file tree + entries, err := r.templateFiler.ReadDir(r.ctx, currentDirectory) + if err != nil { + return err + } + for _, entry := range entries { + if entry.IsDir() { + // Add to slice, for BFS traversal + directories = append(directories, path.Join(currentDirectory, entry.Name())) + continue + } + + // Generate in memory representation of file + f, err := r.computeFile(path.Join(currentDirectory, entry.Name())) + if err != nil { + return err + } + logger.Infof(r.ctx, "added file to list of in memory files: %s", f.relPath) + r.files = append(r.files, f) + } + + } + return nil +} + +func (r *renderer) persistToDisk() error { + // Accumulate files which we will persist, skipping files whose path matches + // any of the skip patterns + filesToPersist := make([]*inMemoryFile, 0) + for _, file := range r.files { + isSkipped, err := r.isSkipped(file.relPath) + if err != nil { + return err + } + if isSkipped { + log.Infof(r.ctx, "skipping file: %s", file.relPath) + continue + } + filesToPersist = append(filesToPersist, file) + } + + // Assert no conflicting files exist + for _, file := range filesToPersist { + path := file.fullPath() + _, err := os.Stat(path) + if err == nil { + return fmt.Errorf("failed to persist to disk, conflict with existing file: %s", path) + } + if err != nil && !os.IsNotExist(err) { + return fmt.Errorf("error while verifying file %s does not already exist: %w", path, err) + } + } + + // Persist files to disk + for _, file := range filesToPersist { + err := file.persistToDisk() + if err != nil { + return err + } + } + return nil +} + +func (r *renderer) isSkipped(filePath string) (bool, error) { + for _, pattern := range r.skipPatterns { + isMatch, err := path.Match(pattern, filePath) + if err != nil { + return false, err + } + if isMatch { + return true, nil + } + } + return false, nil +} diff --git a/libs/template/renderer_test.go 
b/libs/template/renderer_test.go new file mode 100644 index 000000000..468c607f4 --- /dev/null +++ b/libs/template/renderer_test.go @@ -0,0 +1,438 @@ +package template + +import ( + "context" + "fmt" + "io/fs" + "os" + "path/filepath" + "runtime" + "strings" + "testing" + "text/template" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func assertFileContent(t *testing.T, path string, content string) { + b, err := os.ReadFile(path) + require.NoError(t, err) + assert.Equal(t, content, string(b)) +} + +func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) { + info, err := os.Stat(path) + require.NoError(t, err) + assert.Equal(t, perm, info.Mode().Perm()) +} + +func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) { + tmpDir := t.TempDir() + + r, err := newRenderer(context.Background(), nil, "./testdata/email/template", "./testdata/email/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + require.NoError(t, err) + + err = r.persistToDisk() + require.NoError(t, err) + + b, err := os.ReadFile(filepath.Join(tmpDir, "my_email")) + require.NoError(t, err) + assert.Equal(t, "shreyas.goenka@databricks.com", strings.Trim(string(b), "\n\r")) +} + +func TestRendererExecuteTemplate(t *testing.T) { + templateText := + `"{{.count}} items are made of {{.Material}}". +{{if eq .Animal "sheep" }} +Sheep wool is the best! +{{else}} +{{.Animal}} wool is not too bad... 
+{{end}} +My email is {{template "email"}} +` + + r := renderer{ + config: map[string]any{ + "Material": "wool", + "count": 1, + "Animal": "sheep", + }, + baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}shreyas.goenka@databricks.com{{end}}`)), + } + + statement, err := r.executeTemplate(templateText) + require.NoError(t, err) + assert.Contains(t, statement, `"1 items are made of wool"`) + assert.NotContains(t, statement, `cat wool is not too bad.."`) + assert.Contains(t, statement, "Sheep wool is the best!") + assert.Contains(t, statement, `My email is shreyas.goenka@databricks.com`) + + r = renderer{ + config: map[string]any{ + "Material": "wool", + "count": 1, + "Animal": "cat", + }, + baseTemplate: template.Must(template.New("base").Parse(`{{define "email"}}hrithik.roshan@databricks.com{{end}}`)), + } + + statement, err = r.executeTemplate(templateText) + require.NoError(t, err) + assert.Contains(t, statement, `"1 items are made of wool"`) + assert.Contains(t, statement, `cat wool is not too bad...`) + assert.NotContains(t, statement, "Sheep wool is the best!") + assert.Contains(t, statement, `My email is hrithik.roshan@databricks.com`) +} + +func TestRendererIsSkipped(t *testing.T) { + r := renderer{ + skipPatterns: []string{"a*", "*yz", "def", "a/b/*"}, + } + + // skipped paths + isSkipped, err := r.isSkipped("abc") + require.NoError(t, err) + assert.True(t, isSkipped) + + isSkipped, err = r.isSkipped("abcd") + require.NoError(t, err) + assert.True(t, isSkipped) + + isSkipped, err = r.isSkipped("a") + require.NoError(t, err) + assert.True(t, isSkipped) + + isSkipped, err = r.isSkipped("xxyz") + require.NoError(t, err) + assert.True(t, isSkipped) + + isSkipped, err = r.isSkipped("yz") + require.NoError(t, err) + assert.True(t, isSkipped) + + isSkipped, err = r.isSkipped("a/b/c") + require.NoError(t, err) + assert.True(t, isSkipped) + + // NOT skipped paths + isSkipped, err = r.isSkipped(".") + require.NoError(t, err) + assert.False(t, 
isSkipped) + + isSkipped, err = r.isSkipped("y") + require.NoError(t, err) + assert.False(t, isSkipped) + + isSkipped, err = r.isSkipped("z") + require.NoError(t, err) + assert.False(t, isSkipped) + + isSkipped, err = r.isSkipped("defg") + require.NoError(t, err) + assert.False(t, isSkipped) + + isSkipped, err = r.isSkipped("cat") + require.NoError(t, err) + assert.False(t, isSkipped) + + isSkipped, err = r.isSkipped("a/b/c/d") + require.NoError(t, err) + assert.False(t, isSkipped) +} + +func TestRendererPersistToDisk(t *testing.T) { + tmpDir := t.TempDir() + ctx := context.Background() + + r := &renderer{ + ctx: ctx, + instanceRoot: tmpDir, + skipPatterns: []string{"a/b/c", "mn*"}, + files: []*inMemoryFile{ + { + root: tmpDir, + relPath: "a/b/c", + content: nil, + perm: 0444, + }, + { + root: tmpDir, + relPath: "mno", + content: nil, + perm: 0444, + }, + { + root: tmpDir, + relPath: "a/b/d", + content: []byte("123"), + perm: 0444, + }, + { + root: tmpDir, + relPath: "mmnn", + content: []byte("456"), + perm: 0444, + }, + }, + } + + err := r.persistToDisk() + require.NoError(t, err) + + assert.NoFileExists(t, filepath.Join(tmpDir, "a", "b", "c")) + assert.NoFileExists(t, filepath.Join(tmpDir, "mno")) + + assertFileContent(t, filepath.Join(tmpDir, "a", "b", "d"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a", "b", "d"), 0444) + assertFileContent(t, filepath.Join(tmpDir, "mmnn"), "456") + assertFilePermissions(t, filepath.Join(tmpDir, "mmnn"), 0444) +} + +func TestRendererWalk(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/walk/template", "./testdata/walk/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + getContent := func(r *renderer, path string) string { + for _, f := range r.files { + if f.relPath == path { + return strings.Trim(string(f.content), "\r\n") + } + } + require.FailNow(t, "file is absent: "+path) + return "" + } + + 
assert.Len(t, r.files, 4) + assert.Equal(t, "file one", getContent(r, "file1")) + assert.Equal(t, "file two", getContent(r, "file2")) + assert.Equal(t, "file three", getContent(r, "dir1/dir3/file3")) + assert.Equal(t, "file four", getContent(r, "dir2/file4")) +} + +func TestRendererFailFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/fail/template", "./testdata/fail/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.Equal(t, "I am an error message", err.Error()) +} + +func TestRendererSkipsDirsEagerly(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/skip-dir-eagerly/template", "./testdata/skip-dir-eagerly/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + content := string(r.files[0].content) + assert.Equal(t, "I should be the only file created", strings.Trim(content, "\r\n")) +} + +func TestRendererSkipAllFilesInCurrentDirectory(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/skip-all-files-in-cwd/template", "./testdata/skip-all-files-in-cwd/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + // All 3 files are executed and have in memory representations + require.Len(t, r.files, 3) + + err = r.persistToDisk() + require.NoError(t, err) + + entries, err := os.ReadDir(tmpDir) + require.NoError(t, err) + // Assert none of the files are persisted to disk, because of {{skip "*"}} + assert.Len(t, entries, 0) +} + +func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/skip-is-relative/template", "./testdata/skip-is-relative/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + 
assert.Len(t, r.skipPatterns, 3) + assert.Contains(t, r.skipPatterns, "a") + assert.Contains(t, r.skipPatterns, "dir1/b") + assert.Contains(t, r.skipPatterns, "dir1/dir2/c") +} + +func TestRendererSkip(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/skip/template", "./testdata/skip/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + // All 6 files are computed, even though "dir2/*" is present as a skip pattern + // This is because "dir2/*" matches the files in dir2, but not dir2 itself + assert.Len(t, r.files, 6) + + err = r.persistToDisk() + require.NoError(t, err) + + assert.FileExists(t, filepath.Join(tmpDir, "file1")) + assert.FileExists(t, filepath.Join(tmpDir, "file2")) + assert.FileExists(t, filepath.Join(tmpDir, "dir1/file5")) + + // These files have been skipped + assert.NoFileExists(t, filepath.Join(tmpDir, "file3")) + assert.NoFileExists(t, filepath.Join(tmpDir, "dir1/file4")) + assert.NoDirExists(t, filepath.Join(tmpDir, "dir2")) + assert.NoFileExists(t, filepath.Join(tmpDir, "dir2/file6")) +} + +func TestRendererInMemoryFileFullPathForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + f := &inMemoryFile{ + root: `c:\a\b\c`, + relPath: "d/e", + } + assert.Equal(t, `c:\a\b\c\d\e`, f.fullPath()) +} + +func TestRendererInMemoryFilePersistToDiskSetsExecutableBit(t *testing.T) { + if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { + t.SkipNow() + } + tmpDir := t.TempDir() + + f := &inMemoryFile{ + root: tmpDir, + relPath: "a/b/c", + content: []byte("123"), + perm: 0755, + } + err := f.persistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), 0755) +} + +func TestRendererInMemoryFilePersistToDiskForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + tmpDir := t.TempDir() + + f := 
&inMemoryFile{ + root: tmpDir, + relPath: "a/b/c", + content: []byte("123"), + perm: 0666, + } + err := f.persistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), 0666) +} + +func TestRendererReadsPermissionsBits(t *testing.T) { + if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { + t.SkipNow() + } + tmpDir := t.TempDir() + ctx := context.Background() + + r, err := newRenderer(ctx, nil, "./testdata/executable-bit-read/template", "./testdata/executable-bit-read/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + getPermissions := func(r *renderer, path string) fs.FileMode { + for _, f := range r.files { + if f.relPath == path { + return f.perm + } + } + require.FailNow(t, "file is absent: "+path) + return 0 + } + + assert.Len(t, r.files, 2) + assert.Equal(t, getPermissions(r, "script.sh"), fs.FileMode(0755)) + assert.Equal(t, getPermissions(r, "not-a-script"), fs.FileMode(0644)) +} + +func TestRendererErrorOnConflictingFile(t *testing.T) { + tmpDir := t.TempDir() + + f, err := os.Create(filepath.Join(tmpDir, "a")) + require.NoError(t, err) + err = f.Close() + require.NoError(t, err) + + r := renderer{ + skipPatterns: []string{}, + files: []*inMemoryFile{ + { + root: tmpDir, + relPath: "a", + content: []byte("123"), + perm: 0444, + }, + }, + } + err = r.persistToDisk() + assert.EqualError(t, err, fmt.Sprintf("failed to persist to disk, conflict with existing file: %s", filepath.Join(tmpDir, "a"))) +} + +func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { + tmpDir := t.TempDir() + ctx := context.Background() + + f, err := os.Create(filepath.Join(tmpDir, "a")) + require.NoError(t, err) + err = f.Close() + require.NoError(t, err) + + r := renderer{ + ctx: ctx, + skipPatterns: []string{"a"}, + files: []*inMemoryFile{ + { + root: tmpDir, + relPath: "a", + content: []byte("123"), + perm: 0444, + }, + }, + } 
+ err = r.persistToDisk() + // No error is returned even though a conflicting file exists. This is because + // the generated file is being skipped + assert.NoError(t, err) + assert.Len(t, r.files, 1) +} diff --git a/libs/template/testdata/email/library/email.tmpl b/libs/template/testdata/email/library/email.tmpl new file mode 100644 index 000000000..1897d46b3 --- /dev/null +++ b/libs/template/testdata/email/library/email.tmpl @@ -0,0 +1 @@ +{{define "email"}}shreyas.goenka@databricks.com{{end}} diff --git a/libs/template/testdata/email/template/my_email b/libs/template/testdata/email/template/my_email new file mode 100644 index 000000000..0b74ef47c --- /dev/null +++ b/libs/template/testdata/email/template/my_email @@ -0,0 +1 @@ +{{template "email"}} diff --git a/libs/template/testdata/executable-bit-read/template/not-a-script b/libs/template/testdata/executable-bit-read/template/not-a-script new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/executable-bit-read/template/script.sh b/libs/template/testdata/executable-bit-read/template/script.sh new file mode 100755 index 000000000..09990d446 --- /dev/null +++ b/libs/template/testdata/executable-bit-read/template/script.sh @@ -0,0 +1 @@ +echo "hello" diff --git a/libs/template/testdata/fail/template/hello b/libs/template/testdata/fail/template/hello new file mode 100644 index 000000000..d9426f8b2 --- /dev/null +++ b/libs/template/testdata/fail/template/hello @@ -0,0 +1 @@ +{{fail "I am an error message"}} diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file1 b/libs/template/testdata/skip-all-files-in-cwd/template/file1 new file mode 100644 index 000000000..789819226 --- /dev/null +++ b/libs/template/testdata/skip-all-files-in-cwd/template/file1 @@ -0,0 +1 @@ +a diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file2 b/libs/template/testdata/skip-all-files-in-cwd/template/file2 new file mode 100644 index 000000000..617807982 --- /dev/null +++ 
b/libs/template/testdata/skip-all-files-in-cwd/template/file2 @@ -0,0 +1 @@ +b diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file3 b/libs/template/testdata/skip-all-files-in-cwd/template/file3 new file mode 100644 index 000000000..9411049f9 --- /dev/null +++ b/libs/template/testdata/skip-all-files-in-cwd/template/file3 @@ -0,0 +1,3 @@ +c + +{{skip "*"}} diff --git a/libs/template/testdata/skip-dir-eagerly/template/dir1/file1 b/libs/template/testdata/skip-dir-eagerly/template/dir1/file1 new file mode 100644 index 000000000..bbf6881bf --- /dev/null +++ b/libs/template/testdata/skip-dir-eagerly/template/dir1/file1 @@ -0,0 +1 @@ +{{fail "This template should never be executed"}} diff --git a/libs/template/testdata/skip-dir-eagerly/template/file2 b/libs/template/testdata/skip-dir-eagerly/template/file2 new file mode 100644 index 000000000..afdf908cb --- /dev/null +++ b/libs/template/testdata/skip-dir-eagerly/template/file2 @@ -0,0 +1,3 @@ +I should be the only file created + +{{skip "dir1"}} diff --git a/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 b/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 new file mode 100644 index 000000000..0f24f26d5 --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 @@ -0,0 +1 @@ +{{skip "c"}} diff --git a/libs/template/testdata/skip-is-relative/template/dir1/file2 b/libs/template/testdata/skip-is-relative/template/dir1/file2 new file mode 100644 index 000000000..53474b01e --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/dir1/file2 @@ -0,0 +1 @@ +{{skip "b"}} diff --git a/libs/template/testdata/skip-is-relative/template/file1 b/libs/template/testdata/skip-is-relative/template/file1 new file mode 100644 index 000000000..b74590a78 --- /dev/null +++ b/libs/template/testdata/skip-is-relative/template/file1 @@ -0,0 +1 @@ +{{skip "a"}} diff --git a/libs/template/testdata/skip/template/dir1/file4 
b/libs/template/testdata/skip/template/dir1/file4 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/dir1/file5 b/libs/template/testdata/skip/template/dir1/file5 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/dir2/file6 b/libs/template/testdata/skip/template/dir2/file6 new file mode 100644 index 000000000..e69de29bb diff --git a/libs/template/testdata/skip/template/file1 b/libs/template/testdata/skip/template/file1 new file mode 100644 index 000000000..9c8752690 --- /dev/null +++ b/libs/template/testdata/skip/template/file1 @@ -0,0 +1 @@ +{{skip "file3"}} diff --git a/libs/template/testdata/skip/template/file2 b/libs/template/testdata/skip/template/file2 new file mode 100644 index 000000000..75db13eab --- /dev/null +++ b/libs/template/testdata/skip/template/file2 @@ -0,0 +1,2 @@ +{{skip "dir1/file4"}} +{{skip "dir2/*"}} diff --git a/libs/template/testdata/skip/template/file3 b/libs/template/testdata/skip/template/file3 new file mode 100644 index 000000000..8b1378917 --- /dev/null +++ b/libs/template/testdata/skip/template/file3 @@ -0,0 +1 @@ + diff --git a/libs/template/testdata/walk/template/dir1/dir3/file3 b/libs/template/testdata/walk/template/dir1/dir3/file3 new file mode 100644 index 000000000..8662caa51 --- /dev/null +++ b/libs/template/testdata/walk/template/dir1/dir3/file3 @@ -0,0 +1 @@ +file three diff --git a/libs/template/testdata/walk/template/dir2/file4 b/libs/template/testdata/walk/template/dir2/file4 new file mode 100644 index 000000000..53e66a681 --- /dev/null +++ b/libs/template/testdata/walk/template/dir2/file4 @@ -0,0 +1,5 @@ +{{if (eq 1 1)}} +file four +{{else}} +mathematics is a lie +{{end}} diff --git a/libs/template/testdata/walk/template/file1 b/libs/template/testdata/walk/template/file1 new file mode 100644 index 000000000..ce2b3df90 --- /dev/null +++ b/libs/template/testdata/walk/template/file1 @@ -0,0 +1 @@ +file one diff --git 
a/libs/template/testdata/walk/template/file2 b/libs/template/testdata/walk/template/file2 new file mode 100644 index 000000000..6c970dbb4 --- /dev/null +++ b/libs/template/testdata/walk/template/file2 @@ -0,0 +1 @@ +file two From 13731e144c01abb2a00f41bf726f86f1cbb3ae5a Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 21 Jul 2023 11:23:47 +0200 Subject: [PATCH 008/139] Fix formatting in renderer.go (#593) ## Changes Due to a bug in Github UI, https://github.com/databricks/cli/pull/589 got merged without passing the go/fmt formatting checks This PR fixes the formatting which breaks the PR checks --- libs/template/renderer.go | 1 - 1 file changed, 1 deletion(-) diff --git a/libs/template/renderer.go b/libs/template/renderer.go index 853e3505b..8502a9288 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -184,7 +184,6 @@ func (r *renderer) computeFile(relPathTemplate string) (*inMemoryFile, error) { }, nil } - // This function walks the template file tree to generate an in memory representation // of a project. // From fa37449f1fc97b234cc3472709ed500988c9ce0d Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 25 Jul 2023 10:00:46 +0200 Subject: [PATCH 009/139] Require include glob patterns to be explicitly defined (#602) ## Changes Before this PR we would load all yaml files matching * and \*/\*.yml files as bundle configurations. 
This was problematic since this would also load yaml files that were not meant to be a part of the bundle ## Tests Manually, now files are no longer included unless manually specified --- bundle/config/mutator/default_include.go | 36 ------------------- bundle/config/mutator/default_include_test.go | 18 ---------- bundle/config/mutator/mutator.go | 1 - bundle/config/root.go | 7 ++-- bundle/schema/docs/bundle_descriptions.json | 2 +- .../one_subconfiguration/databricks.yml | 3 ++ .../two_subconfigurations/databricks.yml | 3 ++ .../{my_second_job => }/resource.yml | 0 bundle/tests/include_default_test.go | 27 -------------- bundle/tests/include_multiple/databricks.yml | 5 +++ .../my_first_job/resource.yml} | 2 +- .../my_second_job/resource.yml | 4 +++ bundle/tests/include_override/databricks.yml | 7 ---- bundle/tests/include_override_test.go | 12 ------- bundle/tests/include_test.go | 24 +++++++++++++ 15 files changed, 43 insertions(+), 108 deletions(-) delete mode 100644 bundle/config/mutator/default_include.go delete mode 100644 bundle/config/mutator/default_include_test.go rename bundle/tests/include_default/{my_second_job => }/resource.yml (100%) delete mode 100644 bundle/tests/include_default_test.go create mode 100644 bundle/tests/include_multiple/databricks.yml rename bundle/tests/{include_override/this_file_isnt_included.yml => include_multiple/my_first_job/resource.yml} (53%) create mode 100644 bundle/tests/include_multiple/my_second_job/resource.yml delete mode 100644 bundle/tests/include_override/databricks.yml delete mode 100644 bundle/tests/include_override_test.go diff --git a/bundle/config/mutator/default_include.go b/bundle/config/mutator/default_include.go deleted file mode 100644 index baf052968..000000000 --- a/bundle/config/mutator/default_include.go +++ /dev/null @@ -1,36 +0,0 @@ -package mutator - -import ( - "context" - - "github.com/databricks/cli/bundle" - "golang.org/x/exp/slices" -) - -type defineDefaultInclude struct { - include []string 
-} - -// DefineDefaultInclude sets the list of includes to a default if it hasn't been set. -func DefineDefaultInclude() bundle.Mutator { - return &defineDefaultInclude{ - // When we support globstar we can collapse below into a single line. - include: []string{ - // Load YAML files in the same directory. - "*.yml", - // Load YAML files in subdirectories. - "*/*.yml", - }, - } -} - -func (m *defineDefaultInclude) Name() string { - return "DefineDefaultInclude" -} - -func (m *defineDefaultInclude) Apply(_ context.Context, b *bundle.Bundle) error { - if len(b.Config.Include) == 0 { - b.Config.Include = slices.Clone(m.include) - } - return nil -} diff --git a/bundle/config/mutator/default_include_test.go b/bundle/config/mutator/default_include_test.go deleted file mode 100644 index ac1c1d4ef..000000000 --- a/bundle/config/mutator/default_include_test.go +++ /dev/null @@ -1,18 +0,0 @@ -package mutator_test - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func TestDefaultInclude(t *testing.T) { - bundle := &bundle.Bundle{} - err := mutator.DefineDefaultInclude().Apply(context.Background(), bundle) - require.NoError(t, err) - assert.Equal(t, []string{"*.yml", "*/*.yml"}, bundle.Config.Include) -} diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 9a4486042..058258c87 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -6,7 +6,6 @@ import ( func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ - DefineDefaultInclude(), ProcessRootIncludes(), DefineDefaultEnvironment(), LoadGitDetails(), diff --git a/bundle/config/root.go b/bundle/config/root.go index 28b1a6158..f5a4f00d3 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -54,11 +54,8 @@ type Root struct { Bundle Bundle `json:"bundle"` // Include specifies a list 
of patterns of file names to load and - // merge into the this configuration. If not set in `databricks.yml`, - // it defaults to loading `*.yml` and `*/*.yml`. - // - // Also see [mutator.DefineDefaultInclude]. - // + // merge into the this configuration. Only includes defined in the root + // `databricks.yml` are processed. Defaults to an empty list. Include []string `json:"include,omitempty"` // Workspace contains details about the workspace to connect to diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 6c5850c12..2adb11f21 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -1838,7 +1838,7 @@ } }, "include": { - "description": "A list of patterns of file names to load and merge into the this configuration. It defaults to loading `*.yml` and `*/*.yml`.", + "description": "A list of glob patterns of files to load and merge into the this configuration. Defaults to no files being included.", "items": { "description": "" } diff --git a/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml b/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml index a81602920..ea4dec2e1 100644 --- a/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml +++ b/bundle/tests/conflicting_resource_ids/one_subconfiguration/databricks.yml @@ -4,6 +4,9 @@ bundle: workspace: profile: test +include: + - "*.yml" + resources: jobs: foo: diff --git a/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml b/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml index f8fe99ebc..c1da3eaeb 100644 --- a/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml +++ b/bundle/tests/conflicting_resource_ids/two_subconfigurations/databricks.yml @@ -3,3 +3,6 @@ bundle: workspace: profile: test + +include: + - "*.yml" diff --git 
a/bundle/tests/include_default/my_second_job/resource.yml b/bundle/tests/include_default/resource.yml similarity index 100% rename from bundle/tests/include_default/my_second_job/resource.yml rename to bundle/tests/include_default/resource.yml diff --git a/bundle/tests/include_default_test.go b/bundle/tests/include_default_test.go deleted file mode 100644 index dc7dbcd9c..000000000 --- a/bundle/tests/include_default_test.go +++ /dev/null @@ -1,27 +0,0 @@ -package config_tests - -import ( - "path/filepath" - "sort" - "testing" - - "github.com/stretchr/testify/assert" - "golang.org/x/exp/maps" -) - -func TestIncludeDefault(t *testing.T) { - b := load(t, "./include_default") - - // Test that both jobs were loaded. - keys := maps.Keys(b.Config.Resources.Jobs) - sort.Strings(keys) - assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys) - - first := b.Config.Resources.Jobs["my_first_job"] - assert.Equal(t, "1", first.ID) - assert.Equal(t, "include_default/my_first_job/resource.yml", filepath.ToSlash(first.ConfigFilePath)) - - second := b.Config.Resources.Jobs["my_second_job"] - assert.Equal(t, "2", second.ID) - assert.Equal(t, "include_default/my_second_job/resource.yml", filepath.ToSlash(second.ConfigFilePath)) -} diff --git a/bundle/tests/include_multiple/databricks.yml b/bundle/tests/include_multiple/databricks.yml new file mode 100644 index 000000000..ca3ff8545 --- /dev/null +++ b/bundle/tests/include_multiple/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: include_default + +include: + - "*/*.yml" diff --git a/bundle/tests/include_override/this_file_isnt_included.yml b/bundle/tests/include_multiple/my_first_job/resource.yml similarity index 53% rename from bundle/tests/include_override/this_file_isnt_included.yml rename to bundle/tests/include_multiple/my_first_job/resource.yml index c9ba1452f..c2be5a160 100644 --- a/bundle/tests/include_override/this_file_isnt_included.yml +++ b/bundle/tests/include_multiple/my_first_job/resource.yml @@ -1,4 +1,4 @@ 
resources: jobs: - this_job_isnt_defined: + my_first_job: id: 1 diff --git a/bundle/tests/include_multiple/my_second_job/resource.yml b/bundle/tests/include_multiple/my_second_job/resource.yml new file mode 100644 index 000000000..2c28c4622 --- /dev/null +++ b/bundle/tests/include_multiple/my_second_job/resource.yml @@ -0,0 +1,4 @@ +resources: + jobs: + my_second_job: + id: 2 diff --git a/bundle/tests/include_override/databricks.yml b/bundle/tests/include_override/databricks.yml deleted file mode 100644 index 02de362cd..000000000 --- a/bundle/tests/include_override/databricks.yml +++ /dev/null @@ -1,7 +0,0 @@ -bundle: - name: include_override - -# Setting this explicitly means default globs are not processed. -# As a result, ./this_file_isnt_included.yml isn't included. -include: - - doesnt-exist/*.yml diff --git a/bundle/tests/include_override_test.go b/bundle/tests/include_override_test.go deleted file mode 100644 index 0e18fab34..000000000 --- a/bundle/tests/include_override_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestIncludeOverride(t *testing.T) { - b := load(t, "./include_override") - assert.Empty(t, b.Config.Resources.Jobs) -} diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index d704b8380..00aecb9fd 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -32,3 +32,27 @@ func TestIncludeWithGlob(t *testing.T) { assert.Equal(t, "1", job.ID) assert.Equal(t, "include_with_glob/job.yml", filepath.ToSlash(job.ConfigFilePath)) } + +func TestIncludeDefault(t *testing.T) { + b := load(t, "./include_default") + + // No jobs should have been loaded + assert.Empty(t, b.Config.Resources.Jobs) +} + +func TestIncludeForMultipleMatches(t *testing.T) { + b := load(t, "./include_multiple") + + // Test that both jobs were loaded. 
+ keys := maps.Keys(b.Config.Resources.Jobs) + sort.Strings(keys) + assert.Equal(t, []string{"my_first_job", "my_second_job"}, keys) + + first := b.Config.Resources.Jobs["my_first_job"] + assert.Equal(t, "1", first.ID) + assert.Equal(t, "include_multiple/my_first_job/resource.yml", filepath.ToSlash(first.ConfigFilePath)) + + second := b.Config.Resources.Jobs["my_second_job"] + assert.Equal(t, "2", second.ID) + assert.Equal(t, "include_multiple/my_second_job/resource.yml", filepath.ToSlash(second.ConfigFilePath)) +} From 9a88fa602d47f96953d7217530ee498a2f21b7a3 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 25 Jul 2023 13:35:08 +0200 Subject: [PATCH 010/139] Added support for artifacts building for bundles (#583) ## Changes Added support for artifacts building for bundles. Now it allows to specify `artifacts` block in bundle.yml and define a resource (at the moment Python wheel) to be build and uploaded during `bundle deploy` Built artifact will be automatically attached to corresponding job task or pipeline where it's used as a library Follow-ups: 1. If artifact is used in job or pipeline, but not found in the config, try to infer and build it anyway 2. 
If build command is not provided for Python wheel artifact, infer it --- bundle/artifacts/artifacts.go | 167 ++++++++++++++++++ bundle/artifacts/build.go | 22 ++- bundle/artifacts/notebook/build.go | 81 --------- bundle/artifacts/notebook/marker.go | 29 --- bundle/artifacts/notebook/upload.go | 60 ------- bundle/artifacts/upload.go | 35 +++- bundle/artifacts/whl/build.go | 66 +++++++ bundle/config/artifact.go | 76 ++++++-- bundle/libraries/libraries.go | 107 +++++++++++ bundle/phases/deploy.go | 3 + bundle/tests/bundle/python_wheel/.gitignore | 3 + bundle/tests/bundle/python_wheel/bundle.yml | 19 ++ .../bundle/python_wheel/my_test_code/setup.py | 15 ++ .../python_wheel/my_test_code/src/__init__.py | 2 + .../python_wheel/my_test_code/src/__main__.py | 16 ++ bundle/tests/bundle/wheel_test.go | 26 +++ python/utils.go | 48 +++++ python/wheel.go | 46 +---- 18 files changed, 596 insertions(+), 225 deletions(-) create mode 100644 bundle/artifacts/artifacts.go delete mode 100644 bundle/artifacts/notebook/build.go delete mode 100644 bundle/artifacts/notebook/marker.go delete mode 100644 bundle/artifacts/notebook/upload.go create mode 100644 bundle/artifacts/whl/build.go create mode 100644 bundle/libraries/libraries.go create mode 100644 bundle/tests/bundle/python_wheel/.gitignore create mode 100644 bundle/tests/bundle/python_wheel/bundle.yml create mode 100644 bundle/tests/bundle/python_wheel/my_test_code/setup.py create mode 100644 bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py create mode 100644 bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py create mode 100644 bundle/tests/bundle/wheel_test.go create mode 100644 python/utils.go diff --git a/bundle/artifacts/artifacts.go b/bundle/artifacts/artifacts.go new file mode 100644 index 000000000..c54131217 --- /dev/null +++ b/bundle/artifacts/artifacts.go @@ -0,0 +1,167 @@ +package artifacts + +import ( + "context" + "crypto/sha256" + "encoding/base64" + "errors" + "fmt" + "os" + "path" + + 
"github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/workspace" +) + +type mutatorFactory = func(name string) bundle.Mutator + +var buildMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ + config.ArtifactPythonWheel: whl.Build, +} + +var uploadMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{} + +func getBuildMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := buildMutators[t] + if !ok { + mutatorFactory = BasicBuild + } + + return mutatorFactory(name) +} + +func getUploadMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := uploadMutators[t] + if !ok { + mutatorFactory = BasicUpload + } + + return mutatorFactory(name) +} + +// Basic Build defines a general build mutator which builds artifact based on artifact.BuildCommand +type basicBuild struct { + name string +} + +func BasicBuild(name string) bundle.Mutator { + return &basicBuild{name: name} +} + +func (m *basicBuild) Name() string { + return fmt.Sprintf("artifacts.Build(%s)", m.name) +} + +func (m *basicBuild) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Build(%s): Building...", m.name)) + + out, err := artifact.Build(ctx) + if err != nil { + return fmt.Errorf("artifacts.Build(%s): %w, output: %s", m.name, err, out) + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Build(%s): Build succeeded", m.name)) + + return nil +} + +// Basic Upload defines a general upload mutator which uploads artifact as a library to workspace +type basicUpload struct { + name string +} + +func BasicUpload(name string) bundle.Mutator { + return 
&basicUpload{name: name} +} + +func (m *basicUpload) Name() string { + return fmt.Sprintf("artifacts.Build(%s)", m.name) +} + +func (m *basicUpload) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + if len(artifact.Files) == 0 { + return fmt.Errorf("artifact source is not configured: %s", m.name) + } + + err := uploadArtifact(ctx, artifact, b) + if err != nil { + return fmt.Errorf("artifacts.Upload(%s): %w", m.name, err) + } + + return nil +} + +func uploadArtifact(ctx context.Context, a *config.Artifact, b *bundle.Bundle) error { + for i := range a.Files { + f := &a.Files[i] + if f.NeedsUpload() { + filename := path.Base(f.Source) + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Upload(%s): Uploading...", filename)) + remotePath, err := uploadArtifactFile(ctx, f.Source, b) + if err != nil { + return err + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.Upload(%s): Upload succeeded", filename)) + + f.RemotePath = remotePath + } + } + + a.NormalisePaths() + return nil +} + +// Function to upload artifact file to Workspace +func uploadArtifactFile(ctx context.Context, file string, b *bundle.Bundle) (string, error) { + raw, err := os.ReadFile(file) + if err != nil { + return "", fmt.Errorf("unable to read %s: %w", file, errors.Unwrap(err)) + } + + uploadPath, err := getUploadBasePath(b) + if err != nil { + return "", err + } + + fileHash := sha256.Sum256(raw) + remotePath := path.Join(uploadPath, fmt.Sprintf("%x", fileHash), path.Base(file)) + // Make sure target directory exists. + err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, path.Dir(remotePath)) + if err != nil { + return "", fmt.Errorf("unable to create directory for %s: %w", remotePath, err) + } + + // Import to workspace. 
+ err = b.WorkspaceClient().Workspace.Import(ctx, workspace.Import{ + Path: remotePath, + Overwrite: true, + Format: workspace.ImportFormatAuto, + Content: base64.StdEncoding.EncodeToString(raw), + }) + if err != nil { + return "", fmt.Errorf("unable to import %s: %w", remotePath, err) + } + + return remotePath, nil +} + +func getUploadBasePath(b *bundle.Bundle) (string, error) { + artifactPath := b.Config.Workspace.ArtifactsPath + if artifactPath == "" { + return "", fmt.Errorf("remote artifact path not configured") + } + + return path.Join(artifactPath, ".internal"), nil +} diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go index 294351f41..7721635a8 100644 --- a/bundle/artifacts/build.go +++ b/bundle/artifacts/build.go @@ -3,9 +3,9 @@ package artifacts import ( "context" "fmt" + "path/filepath" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/artifacts/notebook" ) func BuildAll() bundle.Mutator { @@ -33,9 +33,23 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if artifact.Notebook != nil { - return bundle.Apply(ctx, b, notebook.Build(m.name)) + if len(artifact.Files) == 0 && artifact.BuildCommand == "" { + return fmt.Errorf("artifact %s misconfigured: 'files' or 'build' property is required", m.name) } - return nil + // If artifact file is explicitly defined, skip building the artifact + if len(artifact.Files) != 0 { + return nil + } + + // If artifact path is not provided, use bundle root dir + if artifact.Path == "" { + artifact.Path = b.Config.Path + } + + if !filepath.IsAbs(artifact.Path) { + artifact.Path = filepath.Join(b.Config.Path, artifact.Path) + } + + return bundle.Apply(ctx, b, getBuildMutator(artifact.Type, m.name)) } diff --git a/bundle/artifacts/notebook/build.go b/bundle/artifacts/notebook/build.go deleted file mode 100644 index 4a25868a9..000000000 --- a/bundle/artifacts/notebook/build.go +++ /dev/null @@ -1,81 +0,0 @@ 
-package notebook - -import ( - "context" - "errors" - "fmt" - "os" - "path" - "path/filepath" - "strings" - - "github.com/databricks/cli/bundle" - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -type build struct { - name string -} - -func Build(name string) bundle.Mutator { - return &build{ - name: name, - } -} - -func (m *build) Name() string { - return fmt.Sprintf("notebook.Build(%s)", m.name) -} - -func (m *build) Apply(_ context.Context, b *bundle.Bundle) error { - a, ok := b.Config.Artifacts[m.name] - if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) - } - - artifact := a.Notebook - - // Check if the filetype is supported. - switch ext := strings.ToLower(filepath.Ext(artifact.Path)); ext { - case ".py": - artifact.Language = workspace.LanguagePython - case ".scala": - artifact.Language = workspace.LanguageScala - case ".sql": - artifact.Language = workspace.LanguageSql - default: - return fmt.Errorf("invalid notebook extension: %s", ext) - } - - // Open underlying file. - f, err := os.Open(filepath.Join(b.Config.Path, artifact.Path)) - if err != nil { - return fmt.Errorf("unable to open artifact file %s: %w", artifact.Path, errors.Unwrap(err)) - } - defer f.Close() - - // Check that the file contains the notebook marker on its first line. - ok, err = hasMarker(artifact.Language, f) - if err != nil { - return fmt.Errorf("unable to read artifact file %s: %s", artifact.Path, errors.Unwrap(err)) - } - if !ok { - return fmt.Errorf("notebook marker not found in %s", artifact.Path) - } - - // Check that an artifact path is defined. - remotePath := b.Config.Workspace.ArtifactsPath - if remotePath == "" { - return fmt.Errorf("remote artifact path not configured") - } - - // Store absolute paths. 
- artifact.LocalPath = filepath.Join(b.Config.Path, artifact.Path) - artifact.RemotePath = path.Join(remotePath, stripExtension(artifact.Path)) - return nil -} - -func stripExtension(path string) string { - ext := filepath.Ext(path) - return path[0 : len(path)-len(ext)] -} diff --git a/bundle/artifacts/notebook/marker.go b/bundle/artifacts/notebook/marker.go deleted file mode 100644 index a04ca9895..000000000 --- a/bundle/artifacts/notebook/marker.go +++ /dev/null @@ -1,29 +0,0 @@ -package notebook - -import ( - "bufio" - "io" - "strings" - - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -func hasMarker(l workspace.Language, r io.Reader) (bool, error) { - scanner := bufio.NewScanner(r) - ok := scanner.Scan() - if !ok { - return false, scanner.Err() - } - - line := strings.TrimSpace(scanner.Text()) - switch l { - case workspace.LanguagePython: - return line == "# Databricks notebook source", nil - case workspace.LanguageScala: - return line == "// Databricks notebook source", nil - case workspace.LanguageSql: - return line == "-- Databricks notebook source", nil - default: - panic("language not handled: " + l) - } -} diff --git a/bundle/artifacts/notebook/upload.go b/bundle/artifacts/notebook/upload.go deleted file mode 100644 index 38ac9d615..000000000 --- a/bundle/artifacts/notebook/upload.go +++ /dev/null @@ -1,60 +0,0 @@ -package notebook - -import ( - "context" - "encoding/base64" - "errors" - "fmt" - "os" - "path" - - "github.com/databricks/cli/bundle" - "github.com/databricks/databricks-sdk-go/service/workspace" -) - -type upload struct { - name string -} - -func Upload(name string) bundle.Mutator { - return &upload{ - name: name, - } -} - -func (m *upload) Name() string { - return fmt.Sprintf("notebook.Upload(%s)", m.name) -} - -func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { - a, ok := b.Config.Artifacts[m.name] - if !ok { - return fmt.Errorf("artifact doesn't exist: %s", m.name) - } - - artifact := a.Notebook - raw, 
err := os.ReadFile(artifact.LocalPath) - if err != nil { - return fmt.Errorf("unable to read %s: %w", m.name, errors.Unwrap(err)) - } - - // Make sure target directory exists. - err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, path.Dir(artifact.RemotePath)) - if err != nil { - return fmt.Errorf("unable to create directory for %s: %w", m.name, err) - } - - // Import to workspace. - err = b.WorkspaceClient().Workspace.Import(ctx, workspace.Import{ - Path: artifact.RemotePath, - Overwrite: true, - Format: workspace.ImportFormatSource, - Language: artifact.Language, - Content: base64.StdEncoding.EncodeToString(raw), - }) - if err != nil { - return fmt.Errorf("unable to import %s: %w", m.name, err) - } - - return nil -} diff --git a/bundle/artifacts/upload.go b/bundle/artifacts/upload.go index f5ce2b23e..990718aa4 100644 --- a/bundle/artifacts/upload.go +++ b/bundle/artifacts/upload.go @@ -5,7 +5,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/artifacts/notebook" + "github.com/databricks/databricks-sdk-go/service/workspace" ) func UploadAll() bundle.Mutator { @@ -15,6 +15,10 @@ func UploadAll() bundle.Mutator { } } +func CleanUp() bundle.Mutator { + return &cleanUp{} +} + type upload struct { name string } @@ -33,8 +37,33 @@ func (m *upload) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if artifact.Notebook != nil { - return bundle.Apply(ctx, b, notebook.Upload(m.name)) + if len(artifact.Files) == 0 { + return fmt.Errorf("artifact source is not configured: %s", m.name) + } + + return bundle.Apply(ctx, b, getUploadMutator(artifact.Type, m.name)) +} + +type cleanUp struct{} + +func (m *cleanUp) Name() string { + return "artifacts.CleanUp" +} + +func (m *cleanUp) Apply(ctx context.Context, b *bundle.Bundle) error { + uploadPath, err := getUploadBasePath(b) + if err != nil { + return err + } + + b.WorkspaceClient().Workspace.Delete(ctx, workspace.Delete{ + 
Path: uploadPath, + Recursive: true, + }) + + err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, uploadPath) + if err != nil { + return fmt.Errorf("unable to create directory for %s: %w", uploadPath, err) } return nil diff --git a/bundle/artifacts/whl/build.go b/bundle/artifacts/whl/build.go new file mode 100644 index 000000000..4ee47153b --- /dev/null +++ b/bundle/artifacts/whl/build.go @@ -0,0 +1,66 @@ +package whl + +import ( + "context" + "fmt" + "os" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/python" +) + +type build struct { + name string +} + +func Build(name string) bundle.Mutator { + return &build{ + name: name, + } +} + +func (m *build) Name() string { + return fmt.Sprintf("artifacts.whl.Build(%s)", m.name) +} + +func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + // TODO: If not set, BuildCommand should be infer prior to this + // via a mutator so that it can be observable. 
+ if artifact.BuildCommand == "" { + return fmt.Errorf("artifacts.whl.Build(%s): missing build property for the artifact", m.name) + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.Build(%s): Building...", m.name)) + + dir := artifact.Path + + distPath := filepath.Join(dir, "dist") + os.RemoveAll(distPath) + python.CleanupWheelFolder(dir) + + out, err := artifact.Build(ctx) + if err != nil { + return fmt.Errorf("artifacts.whl.Build(%s): Failed %w, output: %s", m.name, err, out) + } + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.Build(%s): Build succeeded", m.name)) + + wheels := python.FindFilesWithSuffixInPath(distPath, ".whl") + if len(wheels) == 0 { + return fmt.Errorf("artifacts.whl.Build(%s): cannot find built wheel in %s", m.name, dir) + } + for _, wheel := range wheels { + artifact.Files = append(artifact.Files, config.ArtifactFile{ + Source: wheel, + }) + } + + return nil +} diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index f782fcfcd..1ac371e93 100644 --- a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -1,20 +1,76 @@ package config -import "github.com/databricks/databricks-sdk-go/service/workspace" +import ( + "context" + "fmt" + "os/exec" + "path" + "strings" + + "github.com/databricks/databricks-sdk-go/service/compute" +) + +type ArtifactType string + +const ArtifactPythonWheel ArtifactType = `whl` + +type ArtifactFile struct { + Source string `json:"source"` + RemotePath string `json:"-" bundle:"readonly"` + Libraries []*compute.Library `json:"-" bundle:"readonly"` +} // Artifact defines a single local code artifact that can be // built/uploaded/referenced in the context of this bundle. 
type Artifact struct { - Notebook *NotebookArtifact `json:"notebook,omitempty"` -} + Type ArtifactType `json:"type"` -type NotebookArtifact struct { + // The local path to the directory with a root of artifact, + // for example, where setup.py is for Python projects Path string `json:"path"` - // Language is detected during build step. - Language workspace.Language `json:"language,omitempty" bundle:"readonly"` - - // Paths are synthesized during build step. - LocalPath string `json:"local_path,omitempty" bundle:"readonly"` - RemotePath string `json:"remote_path,omitempty" bundle:"readonly"` + // The relative or absolute path to the built artifact files + // (Python wheel, Java jar and etc) itself + Files []ArtifactFile `json:"files"` + BuildCommand string `json:"build"` +} + +func (a *Artifact) Build(ctx context.Context) ([]byte, error) { + if a.BuildCommand == "" { + return nil, fmt.Errorf("no build property defined") + } + + buildParts := strings.Split(a.BuildCommand, " ") + cmd := exec.CommandContext(ctx, buildParts[0], buildParts[1:]...) + cmd.Dir = a.Path + return cmd.CombinedOutput() +} + +func (a *Artifact) NormalisePaths() { + for _, f := range a.Files { + // If no libraries attached, nothing to normalise, skipping + if f.Libraries == nil { + continue + } + + wsfsBase := "/Workspace" + remotePath := path.Join(wsfsBase, f.RemotePath) + for i := range f.Libraries { + lib := f.Libraries[i] + switch a.Type { + case ArtifactPythonWheel: + lib.Whl = remotePath + } + } + + } +} + +// This function determines if artifact files needs to be uploaded. +// During the bundle processing we analyse which library uses which artifact file. +// If artifact file is used as a library, we store the reference to this library in artifact file Libraries field. +// If artifact file has libraries it's been used in, it means than we need to upload this file. 
+// Otherwise this artifact file is not used and we skip uploading +func (af *ArtifactFile) NeedsUpload() bool { + return af.Libraries != nil } diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go new file mode 100644 index 000000000..ff86a34b5 --- /dev/null +++ b/bundle/libraries/libraries.go @@ -0,0 +1,107 @@ +package libraries + +import ( + "context" + "fmt" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type match struct { +} + +func MatchWithArtifacts() bundle.Mutator { + return &match{} +} + +func (a *match) Name() string { + return "libraries.MatchWithArtifacts" +} + +func (a *match) Apply(ctx context.Context, b *bundle.Bundle) error { + r := b.Config.Resources + for k := range b.Config.Resources.Jobs { + tasks := r.Jobs[k].JobSettings.Tasks + for i := range tasks { + task := &tasks[i] + if isMissingRequiredLibraries(task) { + return fmt.Errorf("task '%s' is missing required libraries. 
Please include your package code in task libraries block", task.TaskKey) + } + for j := range task.Libraries { + lib := &task.Libraries[j] + err := findArtifactsAndMarkForUpload(ctx, lib, b) + if err != nil { + return err + } + } + } + } + return nil +} + +func isMissingRequiredLibraries(task *jobs.Task) bool { + if task.Libraries != nil { + return false + } + + return task.PythonWheelTask != nil || task.SparkJarTask != nil +} + +func findLibraryMatches(lib *compute.Library, b *bundle.Bundle) ([]string, error) { + path := libPath(lib) + if path == "" { + return nil, nil + } + + fullPath := filepath.Join(b.Config.Path, path) + return filepath.Glob(fullPath) +} + +func findArtifactsAndMarkForUpload(ctx context.Context, lib *compute.Library, b *bundle.Bundle) error { + matches, err := findLibraryMatches(lib, b) + if err != nil { + return err + } + + for _, match := range matches { + af, err := findArtifactFileByLocalPath(match, b) + if err != nil { + cmdio.LogString(ctx, fmt.Sprintf("%s. Skipping %s. 
In order to use the library upload it manually", err.Error(), match)) + } else { + af.Libraries = append(af.Libraries, lib) + } + } + + return nil +} + +func findArtifactFileByLocalPath(path string, b *bundle.Bundle) (*config.ArtifactFile, error) { + for _, a := range b.Config.Artifacts { + for k := range a.Files { + if a.Files[k].Source == path { + return &a.Files[k], nil + } + } + } + + return nil, fmt.Errorf("artifact file is not found for path %s", path) +} + +func libPath(library *compute.Library) string { + if library.Whl != "" { + return library.Whl + } + if library.Jar != "" { + return library.Jar + } + if library.Egg != "" { + return library.Egg + } + + return "" +} diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index f2692ea9b..8b53273c7 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -6,6 +6,7 @@ import ( "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/libraries" ) // The deploy phase deploys artifacts and resources. 
@@ -15,6 +16,8 @@ func Deploy() bundle.Mutator { bundle.Defer( bundle.Seq( files.Upload(), + libraries.MatchWithArtifacts(), + artifacts.CleanUp(), artifacts.UploadAll(), terraform.Interpolate(), terraform.Write(), diff --git a/bundle/tests/bundle/python_wheel/.gitignore b/bundle/tests/bundle/python_wheel/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/bundle/python_wheel/bundle.yml new file mode 100644 index 000000000..9c518589d --- /dev/null +++ b/bundle/tests/bundle/python_wheel/bundle.yml @@ -0,0 +1,19 @@ +bundle: + name: python-wheel + +artifacts: + my_test_code: + type: whl + path: "./my_test_code" + build: "/usr/local/bin/python setup.py bdist_wheel" + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" diff --git a/bundle/tests/bundle/python_wheel/my_test_code/setup.py b/bundle/tests/bundle/python_wheel/my_test_code/setup.py new file mode 100644 index 000000000..0bd871dd3 --- /dev/null +++ b/bundle/tests/bundle/python_wheel/my_test_code/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import src + +setup( + name="my_test_code", + version=src.__version__, + author=src.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["src"]), + entry_points={"group_1": "run=src.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py b/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ 
b/bundle/tests/bundle/python_wheel/my_test_code/src/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git a/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py b/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py new file mode 100644 index 000000000..73d045afb --- /dev/null +++ b/bundle/tests/bundle/python_wheel/my_test_code/src/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print('Hello from my func') + print('Got arguments:') + print(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go new file mode 100644 index 000000000..9a6b2fd2c --- /dev/null +++ b/bundle/tests/bundle/wheel_test.go @@ -0,0 +1,26 @@ +package bundle + +import ( + "context" + "os" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/phases" + "github.com/databricks/cli/internal" + "github.com/stretchr/testify/require" +) + +func TestAccBundlePythonWheelBuild(t *testing.T) { + t.Log(internal.GetEnvOrSkipTest(t, "CLOUD_ENV")) + + b, err := bundle.Load("./python_wheel") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(context.Background(), b) + require.NoError(t, err) + + _, err = os.Stat("./python_wheel/my_test_code/dist/my_test_code-0.0.1-py2-none-any.whl") + require.NoError(t, err) +} diff --git a/python/utils.go b/python/utils.go new file mode 100644 index 000000000..10654edc0 --- /dev/null +++ b/python/utils.go @@ -0,0 +1,48 @@ +package python + +// TODO: move this package into the libs + +import ( + "context" + "os" + "path" + "strings" + + "github.com/databricks/cli/libs/log" +) + +func CleanupWheelFolder(dir string) { + // there or not there - we don't care + os.RemoveAll(path.Join(dir, "__pycache__")) + os.RemoveAll(path.Join(dir, "build")) + eggInfo := FindFilesWithSuffixInPath(dir, 
".egg-info") + if len(eggInfo) == 0 { + return + } + for _, f := range eggInfo { + os.RemoveAll(f) + } +} + +func FindFilesWithSuffixInPath(dir, suffix string) []string { + f, err := os.Open(dir) + if err != nil { + log.Debugf(context.Background(), "open dir %s: %s", dir, err) + return nil + } + entries, err := f.ReadDir(0) + if err != nil { + log.Debugf(context.Background(), "read dir %s: %s", dir, err) + // todo: log + return nil + } + + files := make([]string, 0) + for _, child := range entries { + if !strings.HasSuffix(child.Name(), suffix) { + continue + } + files = append(files, path.Join(dir, child.Name())) + } + return files +} diff --git a/python/wheel.go b/python/wheel.go index ff05509dc..39c3d4cb4 100644 --- a/python/wheel.go +++ b/python/wheel.go @@ -6,7 +6,6 @@ import ( "io" "os" "path" - "strings" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" @@ -18,7 +17,7 @@ func BuildWheel(ctx context.Context, dir string) (string, error) { // remove previous dist leak os.RemoveAll("dist") // remove all other irrelevant traces - silentlyCleanupWheelFolder(".") + CleanupWheelFolder(".") // call simple wheel builder. 
we may need to pip install wheel as well out, err := Py(ctx, "setup.py", "bdist_wheel") if err != nil { @@ -27,13 +26,16 @@ func BuildWheel(ctx context.Context, dir string) (string, error) { log.Debugf(ctx, "Built wheel: %s", out) // and cleanup afterwards - silentlyCleanupWheelFolder(".") + CleanupWheelFolder(".") - wheel := silentChildWithSuffix("dist", ".whl") - if wheel == "" { + wheels := FindFilesWithSuffixInPath("dist", ".whl") + if len(wheels) == 0 { return "", fmt.Errorf("cannot find built wheel in %s", dir) } - return path.Join(dir, wheel), nil + if len(wheels) != 1 { + return "", fmt.Errorf("more than 1 wheel file found in %s", dir) + } + return path.Join(dir, wheels[0]), nil } const DBFSWheelLocation = "dbfs:/FileStore/wheels/simple" @@ -82,38 +84,6 @@ func UploadWheelToDBFSWithPEP503(ctx context.Context, dir string) (string, error return dbfsLoc, err } -func silentlyCleanupWheelFolder(dir string) { - // there or not there - we don't care - os.RemoveAll(path.Join(dir, "__pycache__")) - os.RemoveAll(path.Join(dir, "build")) - eggInfo := silentChildWithSuffix(dir, ".egg-info") - if eggInfo == "" { - return - } - os.RemoveAll(eggInfo) -} - -func silentChildWithSuffix(dir, suffix string) string { - f, err := os.Open(dir) - if err != nil { - log.Debugf(context.Background(), "open dir %s: %s", dir, err) - return "" - } - entries, err := f.ReadDir(0) - if err != nil { - log.Debugf(context.Background(), "read dir %s: %s", dir, err) - // todo: log - return "" - } - for _, child := range entries { - if !strings.HasSuffix(child.Name(), suffix) { - continue - } - return path.Join(dir, child.Name()) - } - return "" -} - func chdirAndBack(dir string) func() { wd, _ := os.Getwd() os.Chdir(dir) From 8fdc0fec81854d1b7b9013d0b09326d1d8d8303f Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 25 Jul 2023 15:36:20 +0200 Subject: [PATCH 011/139] Add support for cloning repositories (#544) ## Changes Adds support for 
cloning public and private github repositories for databricks templates ## Tests Integration tests --- internal/git_clone_test.go | 63 +++++++++++++++++++++++++++++++++ libs/git/clone.go | 72 ++++++++++++++++++++++++++++++++++++++ libs/git/clone_test.go | 34 ++++++++++++++++++ 3 files changed, 169 insertions(+) create mode 100644 internal/git_clone_test.go create mode 100644 libs/git/clone.go create mode 100644 libs/git/clone_test.go diff --git a/internal/git_clone_test.go b/internal/git_clone_test.go new file mode 100644 index 000000000..b280ebc7d --- /dev/null +++ b/internal/git_clone_test.go @@ -0,0 +1,63 @@ +package internal + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/assert" +) + +func TestAccGitClone(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + ctx := context.Background() + var err error + + err = git.Clone(ctx, "https://github.com/databricks/databricks-empty-ide-project.git", "", tmpDir) + assert.NoError(t, err) + + // assert repo content + assert.NoError(t, err) + b, err := os.ReadFile(filepath.Join(tmpDir, "README-IDE.md")) + assert.NoError(t, err) + assert.Contains(t, string(b), "This folder contains a project that was synchronized from an IDE.") + + // assert current branch is ide, ie default for the repo + b, err = os.ReadFile(filepath.Join(tmpDir, ".git/HEAD")) + assert.NoError(t, err) + assert.Contains(t, string(b), "ide") +} + +func TestAccGitCloneWithOnlyRepoNameOnAlternateBranch(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + ctx := context.Background() + var err error + + err = git.Clone(ctx, "notebook-best-practices", "dais-2022", tmpDir) + + // assert on repo content + assert.NoError(t, err) + b, err := os.ReadFile(filepath.Join(tmpDir, "README.md")) + assert.NoError(t, err) + assert.Contains(t, string(b), "Software engineering best practices for Databricks notebooks") + + // 
assert current branch is main, ie default for the repo + b, err = os.ReadFile(filepath.Join(tmpDir, ".git/HEAD")) + assert.NoError(t, err) + assert.Contains(t, string(b), "dais-2022") +} + +func TestAccGitCloneRepositoryDoesNotExist(t *testing.T) { + t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) + + tmpDir := t.TempDir() + + err := git.Clone(context.Background(), "doesnot-exist", "", tmpDir) + assert.Contains(t, err.Error(), `repository 'https://github.com/databricks/doesnot-exist/' not found`) +} diff --git a/libs/git/clone.go b/libs/git/clone.go new file mode 100644 index 000000000..ec663272d --- /dev/null +++ b/libs/git/clone.go @@ -0,0 +1,72 @@ +package git + +import ( + "bytes" + "context" + "errors" + "fmt" + "os/exec" + "regexp" + "strings" +) + +// source: https://stackoverflow.com/questions/59081778/rules-for-special-characters-in-github-repository-name +var githubRepoRegex = regexp.MustCompile(`^[\w-\.]+$`) + +const githubUrl = "https://github.com" +const databricksOrg = "databricks" + +type cloneOptions struct { + // Branch or tag to clone + Reference string + + // URL for the repository + RepositoryUrl string + + // Local path to clone repository at + TargetPath string +} + +func (opts cloneOptions) args() []string { + args := []string{"clone", opts.RepositoryUrl, opts.TargetPath, "--depth=1", "--no-tags"} + if opts.Reference != "" { + args = append(args, "--branch", opts.Reference) + } + return args +} + +func Clone(ctx context.Context, url, reference, targetPath string) error { + // We assume only the repository name has been if input does not contain any + // `/` characters and the url is only made up of alphanumeric characters and + // ".", "_" and "-". This repository is resolved again databricks github account. 
+ fullUrl := url + if githubRepoRegex.MatchString(url) { + fullUrl = strings.Join([]string{githubUrl, databricksOrg, url}, "/") + } + + opts := cloneOptions{ + Reference: reference, + RepositoryUrl: fullUrl, + TargetPath: targetPath, + } + + cmd := exec.CommandContext(ctx, "git", opts.args()...) + var cmdErr bytes.Buffer + cmd.Stderr = &cmdErr + + // start git clone + err := cmd.Start() + if errors.Is(err, exec.ErrNotFound) { + return fmt.Errorf("please install git CLI to clone a repository: %w", err) + } + if err != nil { + return err + } + + // wait for git clone to complete + err = cmd.Wait() + if err != nil { + return fmt.Errorf("git clone failed: %w. %s", err, cmdErr.String()) + } + return nil +} diff --git a/libs/git/clone_test.go b/libs/git/clone_test.go new file mode 100644 index 000000000..8101178fb --- /dev/null +++ b/libs/git/clone_test.go @@ -0,0 +1,34 @@ +package git + +import ( + "context" + "os/exec" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGitCloneArgs(t *testing.T) { + // case: No branch / tag specified. In this case git clones the default branch + assert.Equal(t, []string{"clone", "abc", "/def", "--depth=1", "--no-tags"}, cloneOptions{ + Reference: "", + RepositoryUrl: "abc", + TargetPath: "/def", + }.args()) + + // case: A branch is specified. 
+ assert.Equal(t, []string{"clone", "abc", "/def", "--depth=1", "--no-tags", "--branch", "my-branch"}, cloneOptions{ + Reference: "my-branch", + RepositoryUrl: "abc", + TargetPath: "/def", + }.args()) +} + +func TestGitCloneWithGitNotFound(t *testing.T) { + // We set $PATH here so the git CLI cannot be found by the clone function + t.Setenv("PATH", "") + tmpDir := t.TempDir() + + err := Clone(context.Background(), "abc", "", tmpDir) + assert.ErrorIs(t, err, exec.ErrNotFound) +} From 47640b8b945b02d78be60978342b8ec2616c8d0f Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 25 Jul 2023 16:42:53 +0200 Subject: [PATCH 012/139] Add regexp compile helper function for templates (#601) ## Tests unit test --- libs/template/helpers.go | 5 ++++ libs/template/helpers_test.go | 25 +++++++++++++++++++ .../regexp-compile/template/hello.tmpl | 5 ++++ 3 files changed, 35 insertions(+) create mode 100644 libs/template/helpers_test.go create mode 100644 libs/template/testdata/regexp-compile/template/hello.tmpl diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 271fd539b..342b3811d 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -2,6 +2,7 @@ package template import ( "fmt" + "regexp" "text/template" ) @@ -17,4 +18,8 @@ var helperFuncs = template.FuncMap{ "fail": func(format string, args ...any) (any, error) { return nil, ErrFail{fmt.Sprintf(format, args...)} }, + // Alias for https://pkg.go.dev/regexp#Compile. 
Allows usage of all methods of regexp.Regexp + "regexp": func(expr string) (*regexp.Regexp, error) { + return regexp.Compile(expr) + }, } diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go new file mode 100644 index 000000000..fbb66ae2a --- /dev/null +++ b/libs/template/helpers_test.go @@ -0,0 +1,25 @@ +package template + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestTemplateRegexpCompileFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/regexp-compile/template", "./testdata/regexp-compile/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + content := string(r.files[0].content) + assert.Contains(t, content, "0:food") + assert.Contains(t, content, "1:fool") +} diff --git a/libs/template/testdata/regexp-compile/template/hello.tmpl b/libs/template/testdata/regexp-compile/template/hello.tmpl new file mode 100644 index 000000000..5ea55d795 --- /dev/null +++ b/libs/template/testdata/regexp-compile/template/hello.tmpl @@ -0,0 +1,5 @@ +{{with (regexp "foo.?")}} +{{range $index, $element := (.FindAllString "seafood fool" -1) }} +{{print $index ":" $element}} +{{end}} +{{end}} From 34f196bb4eada73b97d468d121f6fca55317417d Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 25 Jul 2023 17:18:43 +0200 Subject: [PATCH 013/139] Add unit test that raw strings are printed as is (#599) ## Changes Add unit test that raw strings are printed as is. This method is useful to print text that would otherwise be interpreted a go text template. 
--- libs/template/helpers_test.go | 16 ++++++++++++++++ .../print-without-processing/template/hello.tmpl | 1 + 2 files changed, 17 insertions(+) create mode 100644 libs/template/testdata/print-without-processing/template/hello.tmpl diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index fbb66ae2a..51c470efc 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -2,12 +2,28 @@ package template import ( "context" + "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +func TestTemplatePrintStringWithoutProcessing(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/print-without-processing/template", "./testdata/print-without-processing/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + cleanContent := strings.Trim(string(r.files[0].content), "\n\r") + assert.Equal(t, `{{ fail "abc" }}`, cleanContent) +} + func TestTemplateRegexpCompileFunction(t *testing.T) { ctx := context.Background() tmpDir := t.TempDir() diff --git a/libs/template/testdata/print-without-processing/template/hello.tmpl b/libs/template/testdata/print-without-processing/template/hello.tmpl new file mode 100644 index 000000000..735d02099 --- /dev/null +++ b/libs/template/testdata/print-without-processing/template/hello.tmpl @@ -0,0 +1 @@ +{{`{{ fail "abc" }}`}} From 3fa400f00fc2e00630d42c6cdc19c8f8f4903bf6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 25 Jul 2023 20:19:07 +0200 Subject: [PATCH 014/139] Remove dependency on global state in generated commands (#595) ## Changes Generated commands relied on global variables for flags and request payloads. This is difficult to test if a sequence of tests tries to run the same command with various arguments because the global state causes test interference. Moreover, it is impossible to run tests in parallel. 
This change modifies the approach and turns every command group and command itself into a function that returns a `*cobra.Command`. All flags and request payloads are variables scoped to the command's initialization function. This means it is possible to construct independent copies of the CLI structure and fixes the test isolation issue. The scope of this change is only the generated commands. The other commands will be changed accordingly in subsequent changes. ## Tests Unit and integration tests pass. --- .codegen/cmds-account.go.tmpl | 25 +- .codegen/cmds-workspace.go.tmpl | 11 +- .codegen/service.go.tmpl | 147 +- cmd/account/access-control/access-control.go | 195 +- cmd/account/billable-usage/billable-usage.go | 86 +- cmd/account/budgets/budgets.go | 334 ++- cmd/account/cmd.go | 98 +- cmd/account/credentials/credentials.go | 273 ++- cmd/account/credentials/overrides.go | 11 +- .../custom-app-integration.go | 301 ++- .../encryption-keys/encryption-keys.go | 239 +- cmd/account/encryption-keys/overrides.go | 11 +- cmd/account/groups.go | 8 - cmd/account/groups/groups.go | 479 ++-- cmd/account/groups/overrides.go | 12 +- .../ip-access-lists/ip-access-lists.go | 387 ++-- cmd/account/log-delivery/log-delivery.go | 283 ++- .../metastore-assignments.go | 316 ++- cmd/account/metastores/metastores.go | 306 ++- cmd/account/networks/networks.go | 282 ++- cmd/account/networks/overrides.go | 11 +- .../o-auth-enrollment/o-auth-enrollment.go | 134 +- cmd/account/private-access/private-access.go | 348 +-- .../published-app-integration.go | 306 ++- .../service-principal-secrets.go | 196 +- cmd/account/service-principals/overrides.go | 12 +- .../service-principals/service-principals.go | 487 ++-- cmd/account/settings/settings.go | 206 +- .../storage-credentials.go | 318 ++- cmd/account/storage/overrides.go | 11 +- cmd/account/storage/storage.go | 273 ++- cmd/account/users/overrides.go | 12 +- cmd/account/users/users.go | 487 ++-- cmd/account/vpc-endpoints/vpc-endpoints.go | 284 
++- .../workspace-assignment.go | 251 ++- cmd/account/workspaces/overrides.go | 11 +- cmd/account/workspaces/workspaces.go | 399 ++-- cmd/cmd.go | 40 + cmd/root/root.go | 4 +- cmd/workspace/alerts/alerts.go | 334 ++- cmd/workspace/catalogs/catalogs.go | 322 ++- cmd/workspace/catalogs/overrides.go | 11 +- cmd/workspace/clean-rooms/clean-rooms.go | 313 ++- .../cluster-policies/cluster-policies.go | 387 ++-- cmd/workspace/cluster-policies/overrides.go | 15 +- cmd/workspace/clusters/clusters.go | 1235 ++++++----- cmd/workspace/clusters/overrides.go | 18 +- cmd/workspace/cmd.go | 155 +- cmd/workspace/connections/connections.go | 338 +-- cmd/workspace/current-user/current-user.go | 72 +- cmd/workspace/dashboards/dashboards.go | 371 ++-- cmd/workspace/dashboards/overrides.go | 12 +- cmd/workspace/data-sources/data-sources.go | 72 +- cmd/workspace/experiments/experiments.go | 1532 ++++++++----- .../external-locations/external-locations.go | 326 ++- cmd/workspace/external-locations/overrides.go | 11 +- cmd/workspace/functions/functions.go | 370 ++-- .../git-credentials/git-credentials.go | 361 +-- .../global-init-scripts.go | 344 +-- cmd/workspace/grants/grants.go | 204 +- cmd/workspace/groups.go | 9 - cmd/workspace/groups/groups.go | 479 ++-- cmd/workspace/groups/overrides.go | 12 +- .../instance-pools/instance-pools.go | 363 +-- cmd/workspace/instance-pools/overrides.go | 11 +- .../instance-profiles/instance-profiles.go | 262 ++- cmd/workspace/instance-profiles/overrides.go | 11 +- .../ip-access-lists/ip-access-lists.go | 387 ++-- cmd/workspace/ip-access-lists/overrides.go | 11 +- cmd/workspace/jobs/jobs.go | 1258 ++++++----- cmd/workspace/jobs/overrides.go | 15 +- cmd/workspace/libraries/libraries.go | 238 +- cmd/workspace/metastores/metastores.go | 708 +++--- cmd/workspace/metastores/overrides.go | 11 +- .../model-registry/model-registry.go | 1961 +++++++++++------ cmd/workspace/permissions/permissions.go | 260 ++- cmd/workspace/pipelines/pipelines.go | 870 +++++--- 
.../policy-families/policy-families.go | 148 +- cmd/workspace/providers/providers.go | 461 ++-- cmd/workspace/queries/overrides.go | 12 +- cmd/workspace/queries/queries.go | 460 ++-- cmd/workspace/query-history/overrides.go | 12 +- cmd/workspace/query-history/query-history.go | 94 +- .../recipient-activation.go | 140 +- cmd/workspace/recipients/recipients.go | 517 +++-- cmd/workspace/repos/overrides.go | 33 +- cmd/workspace/repos/repos.go | 381 ++-- cmd/workspace/schemas/overrides.go | 12 +- cmd/workspace/schemas/schemas.go | 379 ++-- cmd/workspace/secrets/overrides.go | 115 +- cmd/workspace/secrets/put_secret.go | 122 + cmd/workspace/secrets/secrets.go | 544 +++-- cmd/workspace/service-principals/overrides.go | 12 +- .../service-principals/service-principals.go | 487 ++-- .../serving-endpoints/serving-endpoints.go | 518 +++-- cmd/workspace/shares/shares.go | 430 ++-- .../storage-credentials/overrides.go | 11 +- .../storage-credentials.go | 446 ++-- .../system-schemas/system-schemas.go | 200 +- .../table-constraints/table-constraints.go | 139 +- cmd/workspace/tables/overrides.go | 12 +- cmd/workspace/tables/tables.go | 398 ++-- cmd/workspace/token-management/overrides.go | 12 +- .../token-management/token-management.go | 300 ++- cmd/workspace/tokens/overrides.go | 11 +- cmd/workspace/tokens/tokens.go | 218 +- cmd/workspace/users/overrides.go | 12 +- cmd/workspace/users/users.go | 487 ++-- cmd/workspace/volumes/volumes.go | 377 ++-- cmd/workspace/warehouses/overrides.go | 12 +- cmd/workspace/warehouses/warehouses.go | 708 +++--- .../workspace-bindings/workspace-bindings.go | 144 +- .../workspace-conf/workspace-conf.go | 144 +- cmd/workspace/workspace/export_dir.go | 68 +- cmd/workspace/workspace/import_dir.go | 62 +- cmd/workspace/workspace/overrides.go | 15 +- cmd/workspace/workspace/workspace.go | 449 ++-- internal/helpers.go | 4 +- main.go | 3 +- main_test.go | 4 +- 120 files changed, 19491 insertions(+), 10885 deletions(-) create mode 100644 cmd/cmd.go create 
mode 100644 cmd/workspace/secrets/put_secret.go diff --git a/.codegen/cmds-account.go.tmpl b/.codegen/cmds-account.go.tmpl index d31959248..f3da7e2c8 100644 --- a/.codegen/cmds-account.go.tmpl +++ b/.codegen/cmds-account.go.tmpl @@ -11,20 +11,21 @@ import ( {{.SnakeName}} "github.com/databricks/cli/cmd/account/{{(.TrimPrefix "account").KebabName}}"{{end}}{{end}}{{end}} ) -var accountCmd = &cobra.Command{ - Use: "account", - Short: `Databricks Account Commands`, -} - -func init() { - root.RootCmd.AddCommand(accountCmd) +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "account", + Short: `Databricks Account Commands`, + } {{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) -}} - accountCmd.AddCommand({{.SnakeName}}.Cmd) + cmd.AddCommand({{.SnakeName}}.New()) {{end}}{{end}}{{end}} - // Register commands with groups - {{range .Services}}{{if .IsAccounts}}{{if not (in $excludes .KebabName) -}} - {{.SnakeName}}.Cmd.GroupID = "{{ .Package.Name }}" - {{end}}{{end}}{{end}} + // Register all groups with the parent command. 
+ groups := Groups() + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + return cmd } diff --git a/.codegen/cmds-workspace.go.tmpl b/.codegen/cmds-workspace.go.tmpl index d3da36554..013c62f88 100644 --- a/.codegen/cmds-workspace.go.tmpl +++ b/.codegen/cmds-workspace.go.tmpl @@ -10,13 +10,12 @@ import ( {{.SnakeName}} "github.com/databricks/cli/cmd/workspace/{{.KebabName}}"{{end}}{{end}}{{end}} ) -func init() { +func All() []*cobra.Command { + var out []*cobra.Command + {{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) -}} - root.RootCmd.AddCommand({{.SnakeName}}.Cmd) + out = append(out, {{.SnakeName}}.New()) {{end}}{{end}}{{end}} - // Register commands with groups - {{range .Services}}{{if not .IsAccounts}}{{if not (in $excludes .KebabName) -}} - {{.SnakeName}}.Cmd.GroupID = "{{ .Package.Name }}" - {{end}}{{end}}{{end}} + return out } diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index 76f4a94ee..91f2e5cf7 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -19,20 +19,34 @@ import ( {{end}} {{define "service"}} -var Cmd = &cobra.Command{ - Use: "{{(.TrimPrefix "account").KebabName}}", - {{- if .Description }} - Short: `{{.Summary | without "`"}}`, - Long: `{{.Comment " " 80 | without "`"}}`, - {{- end }} - Annotations: map[string]string{ - "package": "{{ .Package.Name }}", - }, - {{- if .IsPrivatePreview }} +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) - // This service is being previewed; hide from help output. 
- Hidden: true, - {{- end }} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "{{(.TrimPrefix "account").KebabName}}", + {{- if .Description }} + Short: `{{.Summary | without "`"}}`, + Long: `{{.Comment " " 80 | without "`"}}`, + {{- end }} + GroupID: "{{ .Package.Name }}", + Annotations: map[string]string{ + "package": "{{ .Package.Name }}", + }, + {{- if .IsPrivatePreview }} + + // This service is being previewed; hide from help output. + Hidden: true, + {{- end }} + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } {{- $serviceName := .KebabName -}} @@ -44,26 +58,39 @@ var Cmd = &cobra.Command{ {{end}} // start {{.KebabName}} command -{{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.HasRequiredNonBodyField)) -}} -{{- $needJsonFlag := or $useJsonForAllFields (and .Request (not .Request.IsOnlyPrimitiveFields)) -}} -{{- if .Request}} -var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}} -{{- if $needJsonFlag}} -var {{.CamelName}}Json flags.JsonFlag -{{- end}} -{{end}} -{{if .Wait}}var {{.CamelName}}SkipWait bool -var {{.CamelName}}Timeout time.Duration{{end}} +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var {{.CamelName}}Overrides []func( + *cobra.Command, + {{- if .Request }} + *{{.Service.Package.Name}}.{{.Request.PascalName}}, + {{- end }} +) + +func new{{.PascalName}}() *cobra.Command { + cmd := &cobra.Command{} + + {{- $useJsonForAllFields := or .IsJsonOnly (and .Request (or (not .Request.IsAllRequiredFieldsPrimitive) .Request.HasRequiredNonBodyField)) -}} + {{- $needJsonFlag := or $useJsonForAllFields (and .Request (not .Request.IsOnlyPrimitiveFields)) -}} + + {{- if .Request}} + + var {{.CamelName}}Req {{.Service.Package.Name}}.{{.Request.PascalName}} + {{- if $needJsonFlag}} + var {{.CamelName}}Json flags.JsonFlag + {{- end}} + {{- end}} + + {{if .Wait}}var {{.CamelName}}SkipWait bool + var {{.CamelName}}Timeout time.Duration{{end}} -func init() { - Cmd.AddCommand({{.CamelName}}Cmd) {{if .Wait}} - {{.CamelName}}Cmd.Flags().BoolVar(&{{.CamelName}}SkipWait, "no-wait", {{.CamelName}}SkipWait, `do not wait to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) - {{.CamelName}}Cmd.Flags().DurationVar(&{{.CamelName}}Timeout, "timeout", {{.Wait.Timeout}}*time.Minute, `maximum amount of time to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) + cmd.Flags().BoolVar(&{{.CamelName}}SkipWait, "no-wait", {{.CamelName}}SkipWait, `do not wait to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) + cmd.Flags().DurationVar(&{{.CamelName}}Timeout, "timeout", {{.Wait.Timeout}}*time.Minute, `maximum amount of time to reach {{range $i, $e := .Wait.Success}}{{if $i}} or {{end}}{{.Content}}{{end}} state`) {{end -}} {{if .Request}}// TODO: short flags {{- if $needJsonFlag}} - {{.CamelName}}Cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&{{.CamelName}}Json, "json", `either inline JSON string or @path/to/file.json with request body`) {{- end}} {{$method := .}} {{ if not .IsJsonOnly }} @@ 
-74,38 +101,39 @@ func init() { {{else if .Entity.ArrayValue }}// TODO: array: {{.Name}} {{else if .Entity.MapValue }}// TODO: map via StringToStringVar: {{.Name}} {{else if .Entity.IsEmpty }}// TODO: output-only field - {{else if .Entity.Enum }}{{$method.CamelName}}Cmd.Flags().Var(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", `{{.Summary | without "`"}}`) - {{else}}{{$method.CamelName}}Cmd.Flags().{{template "arg-type" .Entity}}(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", {{$method.CamelName}}Req.{{.PascalName}}, `{{.Summary | without "`"}}`) + {{else if .Entity.Enum }}cmd.Flags().Var(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", `{{.Summary | without "`"}}`) + {{else}}cmd.Flags().{{template "arg-type" .Entity}}(&{{$method.CamelName}}Req.{{.PascalName}}, "{{.KebabName}}", {{$method.CamelName}}Req.{{.PascalName}}, `{{.Summary | without "`"}}`) {{end}} {{- end -}} {{- end}} {{- end}} {{end}} -} -{{- $excludeFromPrompts := list "workspace get-status" -}} -{{- $fullCommandName := (print $serviceName " " .KebabName) -}} -{{- $noPrompt := or .IsCrudCreate (in $excludeFromPrompts $fullCommandName) }} -{{ $hasPosArgs := and .Request (or .Request.IsAllRequiredFieldsPrimitive (eq .PascalName "RunNow")) -}} -{{- $hasSinglePosArg := and $hasPosArgs (eq 1 (len .Request.RequiredFields)) -}} -{{- $serviceHasNamedIdMap := and (and .Service.List .Service.List.NamedIdMap) (not (eq .PascalName "List")) -}} -{{- $hasIdPrompt := and (not $noPrompt) (and $hasSinglePosArg $serviceHasNamedIdMap) -}} -{{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}} -{{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}} -var {{.CamelName}}Cmd = &cobra.Command{ - Use: "{{.KebabName}}{{if $hasPosArgs}}{{range .Request.RequiredFields}} {{.ConstantName}}{{end}}{{end}}", + {{- $excludeFromPrompts := list "workspace get-status" -}} + {{- $fullCommandName := (print $serviceName " " .KebabName) -}} + {{- $noPrompt := or 
.IsCrudCreate (in $excludeFromPrompts $fullCommandName) }} + + {{- $hasPosArgs := and .Request (or .Request.IsAllRequiredFieldsPrimitive (eq .PascalName "RunNow")) -}} + {{- $hasSinglePosArg := and $hasPosArgs (eq 1 (len .Request.RequiredFields)) -}} + {{- $serviceHasNamedIdMap := and (and .Service.List .Service.List.NamedIdMap) (not (eq .PascalName "List")) -}} + {{- $hasIdPrompt := and (not $noPrompt) (and $hasSinglePosArg $serviceHasNamedIdMap) -}} + {{- $wait := and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}} + {{- $hasRequiredArgs := and (not $hasIdPrompt) $hasPosArgs -}} + + cmd.Use = "{{.KebabName}}{{if $hasPosArgs}}{{range .Request.RequiredFields}} {{.ConstantName}}{{end}}{{end}}" {{- if .Description }} - Short: `{{.Summary | without "`"}}`, - Long: `{{.Comment " " 80 | without "`"}}`, + cmd.Short = `{{.Summary | without "`"}}` + cmd.Long = `{{.Comment " " 80 | without "`"}}` {{- end }} {{- if .IsPrivatePreview }} // This command is being previewed; hide from help output. 
- Hidden: true, + cmd.Hidden = true {{- end }} - Annotations: map[string]string{},{{if $hasRequiredArgs }} - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + {{if $hasRequiredArgs }} + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs({{len .Request.RequiredFields}}) {{- if $useJsonForAllFields }} if cmd.Flags().Changed("json") { @@ -113,9 +141,10 @@ var {{.CamelName}}Cmd = &cobra.Command{ } {{- end }} return check(cmd, args) - },{{end}} - PreRunE: root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + {{end}} + cmd.PreRunE = root.Must{{if .Service.IsAccounts}}Account{{else}}Workspace{{end}}Client + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() {{if .Service.IsAccounts}}a := root.AccountClient(ctx){{else}}w := root.WorkspaceClient(ctx){{end}} {{- if .Request }} @@ -204,10 +233,24 @@ var {{.CamelName}}Cmd = &cobra.Command{ {{- else -}} {{template "method-call" .}} {{end -}} - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range {{.CamelName}}Overrides { + fn(cmd{{if .Request}}, &{{.CamelName}}Req{{end}}) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(new{{.PascalName}}()) + }) } {{end}} // end service {{.Name}}{{end}} diff --git a/cmd/account/access-control/access-control.go b/cmd/account/access-control/access-control.go index 5cec69a31..01c076fbd 100755 --- a/cmd/account/access-control/access-control.go +++ b/cmd/account/access-control/access-control.go @@ -12,42 +12,64 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "access-control", - Short: `These APIs manage access rules on resources in an account.`, - Long: `These APIs manage access rules on resources in an account. Currently, only +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "access-control", + Short: `These APIs manage access rules on resources in an account.`, + Long: `These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is called a rule set.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-assignable-roles-for-resource command -var getAssignableRolesForResourceReq iam.GetAssignableRolesForResourceRequest -func init() { - Cmd.AddCommand(getAssignableRolesForResourceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getAssignableRolesForResourceOverrides []func( + *cobra.Command, + *iam.GetAssignableRolesForResourceRequest, +) + +func newGetAssignableRolesForResource() *cobra.Command { + cmd := &cobra.Command{} + + var getAssignableRolesForResourceReq iam.GetAssignableRolesForResourceRequest + // TODO: short flags -} - -var getAssignableRolesForResourceCmd = &cobra.Command{ - Use: "get-assignable-roles-for-resource RESOURCE", - Short: `Get assignable roles for a resource.`, - Long: `Get assignable roles for a resource. + cmd.Use = "get-assignable-roles-for-resource RESOURCE" + cmd.Short = `Get assignable roles for a resource.` + cmd.Long = `Get assignable roles for a resource. Gets all the roles that can be granted on an account level resource. A role is grantable if the rule set on the resource can contain an access rule of the - role.`, + role.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -58,37 +80,59 @@ var getAssignableRolesForResourceCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getAssignableRolesForResourceOverrides { + fn(cmd, &getAssignableRolesForResourceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetAssignableRolesForResource()) + }) } // start get-rule-set command -var getRuleSetReq iam.GetRuleSetRequest -func init() { - Cmd.AddCommand(getRuleSetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRuleSetOverrides []func( + *cobra.Command, + *iam.GetRuleSetRequest, +) + +func newGetRuleSet() *cobra.Command { + cmd := &cobra.Command{} + + var getRuleSetReq iam.GetRuleSetRequest + // TODO: short flags -} - -var getRuleSetCmd = &cobra.Command{ - Use: "get-rule-set NAME ETAG", - Short: `Get a rule set.`, - Long: `Get a rule set. + cmd.Use = "get-rule-set NAME ETAG" + cmd.Short = `Get a rule set.` + cmd.Long = `Get a rule set. Get a rule set by its name. A rule set is always attached to a resource and contains a list of access rules on the said resource. Currently only a default - rule set for each resource is supported.`, + rule set for each resource is supported.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -100,35 +144,56 @@ var getRuleSetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getRuleSetOverrides { + fn(cmd, &getRuleSetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRuleSet()) + }) } // start update-rule-set command -var updateRuleSetReq iam.UpdateRuleSetRequest -var updateRuleSetJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateRuleSetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateRuleSetOverrides []func( + *cobra.Command, + *iam.UpdateRuleSetRequest, +) + +func newUpdateRuleSet() *cobra.Command { + cmd := &cobra.Command{} + + var updateRuleSetReq iam.UpdateRuleSetRequest + var updateRuleSetJson flags.JsonFlag + // TODO: short flags - updateRuleSetCmd.Flags().Var(&updateRuleSetJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateRuleSetJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateRuleSetCmd = &cobra.Command{ - Use: "update-rule-set", - Short: `Update a rule set.`, - Long: `Update a rule set. + cmd.Use = "update-rule-set" + cmd.Short = `Update a rule set.` + cmd.Long = `Update a rule set. Replace the rules of a rule set. First, use get to read the current version of the rule set before modifying it. 
This pattern helps prevent conflicts between - concurrent updates.`, + concurrent updates.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -146,10 +211,24 @@ var updateRuleSetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateRuleSetOverrides { + fn(cmd, &updateRuleSetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateRuleSet()) + }) } // end service AccountAccessControl diff --git a/cmd/account/billable-usage/billable-usage.go b/cmd/account/billable-usage/billable-usage.go index babc7bc2c..b5b9749dc 100755 --- a/cmd/account/billable-usage/billable-usage.go +++ b/cmd/account/billable-usage/billable-usage.go @@ -8,31 +8,51 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "billable-usage", - Short: `This API allows you to download billable usage logs for the specified account and date range.`, - Long: `This API allows you to download billable usage logs for the specified account +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "billable-usage", + Short: `This API allows you to download billable usage logs for the specified account and date range.`, + Long: `This API allows you to download billable usage logs for the specified account and date range. This feature works with all account types.`, - Annotations: map[string]string{ - "package": "billing", - }, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start download command -var downloadReq billing.DownloadRequest -func init() { - Cmd.AddCommand(downloadCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var downloadOverrides []func( + *cobra.Command, + *billing.DownloadRequest, +) + +func newDownload() *cobra.Command { + cmd := &cobra.Command{} + + var downloadReq billing.DownloadRequest + // TODO: short flags - downloadCmd.Flags().BoolVar(&downloadReq.PersonalData, "personal-data", downloadReq.PersonalData, `Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators.`) + cmd.Flags().BoolVar(&downloadReq.PersonalData, "personal-data", downloadReq.PersonalData, `Specify whether to include personally identifiable information in the billable usage logs, for example the email addresses of cluster creators.`) -} - -var downloadCmd = &cobra.Command{ - Use: "download START_MONTH END_MONTH", - Short: `Return billable usage logs.`, - Long: `Return billable usage logs. + cmd.Use = "download START_MONTH END_MONTH" + cmd.Short = `Return billable usage logs.` + cmd.Long = `Return billable usage logs. Returns billable usage logs in CSV format for the specified account and date range. 
For the data schema, see [CSV file schema]. Note that this method might @@ -43,15 +63,17 @@ var downloadCmd = &cobra.Command{ this API may hit a timeout after a few minutes. If you experience this, try to mitigate by calling the API with narrower date ranges. - [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema`, + [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,10 +85,24 @@ var downloadCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range downloadOverrides { + fn(cmd, &downloadReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDownload()) + }) } // end service BillableUsage diff --git a/cmd/account/budgets/budgets.go b/cmd/account/budgets/budgets.go index 3e26b181a..ed8b4591a 100755 --- a/cmd/account/budgets/budgets.go +++ b/cmd/account/budgets/budgets.go @@ -12,40 +12,61 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "budgets", - Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`, - Long: `These APIs manage budget configuration including notifications for exceeding a - budget for a period. They can also retrieve the status of each budget.`, - Annotations: map[string]string{ - "package": "billing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) - // This service is being previewed; hide from help output. - Hidden: true, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "budgets", + Short: `These APIs manage budget configuration including notifications for exceeding a budget for a period.`, + Long: `These APIs manage budget configuration including notifications for exceeding a + budget for a period. They can also retrieve the status of each budget.`, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq billing.WrappedBudget -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *billing.WrappedBudget, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq billing.WrappedBudget + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new budget.`, - Long: `Create a new budget. + cmd.Use = "create" + cmd.Short = `Create a new budget.` + cmd.Long = `Create a new budget. - Creates a new budget in the specified account.`, + Creates a new budget in the specified account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,51 +84,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq billing.DeleteBudgetRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *billing.DeleteBudgetRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq billing.DeleteBudgetRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete BUDGET_ID", - Short: `Delete budget.`, - Long: `Delete budget. + cmd.Use = "delete BUDGET_ID" + cmd.Short = `Delete budget.` + cmd.Long = `Delete budget. - Deletes the budget specified by its UUID.`, + Deletes the budget specified by its UUID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down." - names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Budget ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have budget id") - } deleteReq.BudgetId = args[0] err = a.Budgets.Delete(ctx, deleteReq) @@ -115,52 +145,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq billing.GetBudgetRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *billing.GetBudgetRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq billing.GetBudgetRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get BUDGET_ID", - Short: `Get budget and its status.`, - Long: `Get budget and its status. + cmd.Use = "get BUDGET_ID" + cmd.Short = `Get budget and its status.` + cmd.Long = `Get budget and its status. Gets the budget specified by its UUID, including noncumulative status for each - day that the budget is configured to include.`, + day that the budget is configured to include.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down." 
- names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Budget ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have budget id") - } getReq.BudgetId = args[0] response, err := a.Budgets.Get(ctx, getReq) @@ -168,30 +207,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all budgets.`, - Long: `Get all budgets. + cmd.Use = "list" + cmd.Short = `Get all budgets.` + cmd.Long = `Get all budgets. 
Gets all budgets associated with this account, including noncumulative status - for each day that the budget is configured to include.`, + for each day that the budget is configured to include.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Budgets.ListAll(ctx) @@ -199,34 +256,55 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq billing.WrappedBudget -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *billing.WrappedBudget, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq billing.WrappedBudget + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Modify budget.`, - Long: `Modify budget. 
+ cmd.Use = "update" + cmd.Short = `Modify budget.` + cmd.Long = `Modify budget. Modifies a budget in this account. Budget properties are completely - overwritten.`, + overwritten.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -244,10 +322,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Budgets diff --git a/cmd/account/cmd.go b/cmd/account/cmd.go index 923948b6b..294801a68 100644 --- a/cmd/account/cmd.go +++ b/cmd/account/cmd.go @@ -3,7 +3,6 @@ package account import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" account_access_control "github.com/databricks/cli/cmd/account/access-control" @@ -32,63 +31,42 @@ import ( workspaces "github.com/databricks/cli/cmd/account/workspaces" ) -var accountCmd = &cobra.Command{ - Use: "account", - Short: `Databricks Account Commands`, -} - -func init() { - root.RootCmd.AddCommand(accountCmd) - - accountCmd.AddCommand(account_access_control.Cmd) - accountCmd.AddCommand(billable_usage.Cmd) - accountCmd.AddCommand(budgets.Cmd) - accountCmd.AddCommand(credentials.Cmd) - accountCmd.AddCommand(custom_app_integration.Cmd) - accountCmd.AddCommand(encryption_keys.Cmd) - accountCmd.AddCommand(account_groups.Cmd) - 
accountCmd.AddCommand(account_ip_access_lists.Cmd) - accountCmd.AddCommand(log_delivery.Cmd) - accountCmd.AddCommand(account_metastore_assignments.Cmd) - accountCmd.AddCommand(account_metastores.Cmd) - accountCmd.AddCommand(networks.Cmd) - accountCmd.AddCommand(o_auth_enrollment.Cmd) - accountCmd.AddCommand(private_access.Cmd) - accountCmd.AddCommand(published_app_integration.Cmd) - accountCmd.AddCommand(service_principal_secrets.Cmd) - accountCmd.AddCommand(account_service_principals.Cmd) - accountCmd.AddCommand(account_settings.Cmd) - accountCmd.AddCommand(storage.Cmd) - accountCmd.AddCommand(account_storage_credentials.Cmd) - accountCmd.AddCommand(account_users.Cmd) - accountCmd.AddCommand(vpc_endpoints.Cmd) - accountCmd.AddCommand(workspace_assignment.Cmd) - accountCmd.AddCommand(workspaces.Cmd) - - // Register commands with groups - account_access_control.Cmd.GroupID = "iam" - billable_usage.Cmd.GroupID = "billing" - budgets.Cmd.GroupID = "billing" - credentials.Cmd.GroupID = "provisioning" - custom_app_integration.Cmd.GroupID = "oauth2" - encryption_keys.Cmd.GroupID = "provisioning" - account_groups.Cmd.GroupID = "iam" - account_ip_access_lists.Cmd.GroupID = "settings" - log_delivery.Cmd.GroupID = "billing" - account_metastore_assignments.Cmd.GroupID = "catalog" - account_metastores.Cmd.GroupID = "catalog" - networks.Cmd.GroupID = "provisioning" - o_auth_enrollment.Cmd.GroupID = "oauth2" - private_access.Cmd.GroupID = "provisioning" - published_app_integration.Cmd.GroupID = "oauth2" - service_principal_secrets.Cmd.GroupID = "oauth2" - account_service_principals.Cmd.GroupID = "iam" - account_settings.Cmd.GroupID = "settings" - storage.Cmd.GroupID = "provisioning" - account_storage_credentials.Cmd.GroupID = "catalog" - account_users.Cmd.GroupID = "iam" - vpc_endpoints.Cmd.GroupID = "provisioning" - workspace_assignment.Cmd.GroupID = "iam" - workspaces.Cmd.GroupID = "provisioning" - +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "account", + Short: 
`Databricks Account Commands`, + } + + cmd.AddCommand(account_access_control.New()) + cmd.AddCommand(billable_usage.New()) + cmd.AddCommand(budgets.New()) + cmd.AddCommand(credentials.New()) + cmd.AddCommand(custom_app_integration.New()) + cmd.AddCommand(encryption_keys.New()) + cmd.AddCommand(account_groups.New()) + cmd.AddCommand(account_ip_access_lists.New()) + cmd.AddCommand(log_delivery.New()) + cmd.AddCommand(account_metastore_assignments.New()) + cmd.AddCommand(account_metastores.New()) + cmd.AddCommand(networks.New()) + cmd.AddCommand(o_auth_enrollment.New()) + cmd.AddCommand(private_access.New()) + cmd.AddCommand(published_app_integration.New()) + cmd.AddCommand(service_principal_secrets.New()) + cmd.AddCommand(account_service_principals.New()) + cmd.AddCommand(account_settings.New()) + cmd.AddCommand(storage.New()) + cmd.AddCommand(account_storage_credentials.New()) + cmd.AddCommand(account_users.New()) + cmd.AddCommand(vpc_endpoints.New()) + cmd.AddCommand(workspace_assignment.New()) + cmd.AddCommand(workspaces.New()) + + // Register all groups with the parent command. + groups := Groups() + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + return cmd } diff --git a/cmd/account/credentials/credentials.go b/cmd/account/credentials/credentials.go index 5a1362d15..35c8869a8 100755 --- a/cmd/account/credentials/credentials.go +++ b/cmd/account/credentials/credentials.go @@ -12,34 +12,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "credentials", - Short: `These APIs manage credential configurations for this workspace.`, - Long: `These APIs manage credential configurations for this workspace. Databricks +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "credentials", + Short: `These APIs manage credential configurations for this workspace.`, + Long: `These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the new workspace. A credential configuration encapsulates this role information, and its ID is used when creating a new workspace.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateCredentialRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateCredentialRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateCredentialRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create credential configuration.`, - Long: `Create credential configuration. + cmd.Use = "create" + cmd.Short = `Create credential configuration.` + cmd.Long = `Create credential configuration. 
Creates a Databricks credential configuration that represents cloud cross-account credentials for a specified account. Databricks uses this to set @@ -54,11 +74,12 @@ var createCmd = &cobra.Command{ For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -76,53 +97,62 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteCredentialRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CREDENTIALS_ID", - Short: `Delete credential configuration.`, - Long: `Delete credential configuration. + cmd.Use = "delete CREDENTIALS_ID" + cmd.Short = `Delete credential configuration.` + cmd.Long = `Delete credential configuration. Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot delete a credential that is associated with any - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIALS_ID argument specified. Loading names for Credentials drop-down." - names, err := a.Credentials.CredentialCredentialsNameToCredentialsIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API credential configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api credential configuration id") - } deleteReq.CredentialsId = args[0] err = a.Credentials.Delete(ctx, deleteReq) @@ -130,52 +160,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CREDENTIALS_ID", - Short: `Get credential configuration.`, - Long: `Get credential configuration. + cmd.Use = "get CREDENTIALS_ID" + cmd.Short = `Get credential configuration.` + cmd.Long = `Get credential configuration. 
Gets a Databricks credential configuration object for an account, both - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIALS_ID argument specified. Loading names for Credentials drop-down." - names, err := a.Credentials.CredentialCredentialsNameToCredentialsIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Credentials drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API credential configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api credential configuration id") - } getReq.CredentialsId = args[0] response, err := a.Credentials.Get(ctx, getReq) @@ -183,30 +222,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all credential configurations.`, - Long: `Get all credential configurations. + cmd.Use = "list" + cmd.Short = `Get all credential configurations.` + cmd.Long = `Get all credential configurations. Gets all Databricks credential configurations associated with an account - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Credentials.List(ctx) @@ -214,10 +271,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Credentials diff --git a/cmd/account/credentials/overrides.go b/cmd/account/credentials/overrides.go index 505215055..9f1e6cb66 100644 --- a/cmd/account/credentials/overrides.go +++ b/cmd/account/credentials/overrides.go @@ -1,9 +1,16 @@ package credentials -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.CredentialsId | green}} {{.CredentialsName}} {{.AwsCredentials.StsRole.RoleArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go index 837ac5188..d7269bf47 100755 --- a/cmd/account/custom-app-integration/custom-app-integration.go +++ b/cmd/account/custom-app-integration/custom-app-integration.go @@ -12,48 +12,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "custom-app-integration", - Short: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, - Long: `These APIs enable administrators to manage custom oauth app integrations, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "custom-app-integration", + Short: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, + Long: `These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. **Note:** You can only add/use the OAuth custom application integrations when OAuth enrollment status is enabled. For more details see :method:OAuthEnrollment/create`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateCustomAppIntegration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *oauth2.CreateCustomAppIntegration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateCustomAppIntegration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`) + cmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`) // TODO: complex arg: token_access_policy -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create Custom OAuth App Integration.`, - Long: `Create Custom OAuth App Integration. + cmd.Use = "create" + cmd.Short = `Create Custom OAuth App Integration.` + cmd.Long = `Create Custom OAuth App Integration. Create Custom OAuth App Integration. You can retrieve the custom oauth app integration via - :method:CustomAppIntegration/get.`, + :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -71,36 +92,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeleteCustomAppIntegrationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeleteCustomAppIntegrationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeleteCustomAppIntegrationRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete INTEGRATION_ID", - Short: `Delete Custom OAuth App Integration.`, - Long: `Delete Custom OAuth App Integration. + cmd.Use = "delete INTEGRATION_ID" + cmd.Short = `Delete Custom OAuth App Integration.` + cmd.Long = `Delete Custom OAuth App Integration. Delete an existing Custom OAuth App Integration. You can retrieve the custom - oauth app integration via :method:CustomAppIntegration/get.`, + oauth app integration via :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -111,35 +154,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq oauth2.GetCustomAppIntegrationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *oauth2.GetCustomAppIntegrationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq oauth2.GetCustomAppIntegrationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INTEGRATION_ID", - Short: `Get OAuth Custom App Integration.`, - Long: `Get OAuth Custom App Integration. + cmd.Use = "get INTEGRATION_ID" + cmd.Short = `Get OAuth Custom App Integration.` + cmd.Long = `Get OAuth Custom App Integration. - Gets the Custom OAuth App Integration for the given integration id.`, + Gets the Custom OAuth App Integration for the given integration id.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -150,30 +215,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get custom oauth app integrations.`, - Long: `Get custom oauth app integrations. + cmd.Use = "list" + cmd.Short = `Get custom oauth app integrations.` + cmd.Long = `Get custom oauth app integrations. Get the list of custom oauth app integrations for the specified Databricks - account`, + account` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.CustomAppIntegration.ListAll(ctx) @@ -181,41 +264,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq oauth2.UpdateCustomAppIntegration -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *oauth2.UpdateCustomAppIntegration, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq oauth2.UpdateCustomAppIntegration + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: redirect_urls // TODO: complex arg: token_access_policy -} - -var updateCmd = &cobra.Command{ - Use: "update INTEGRATION_ID", - Short: `Updates Custom OAuth App Integration.`, - Long: `Updates Custom OAuth App Integration. + cmd.Use = "update INTEGRATION_ID" + cmd.Short = `Updates Custom OAuth App Integration.` + cmd.Long = `Updates Custom OAuth App Integration. Updates an existing custom OAuth App Integration. 
You can retrieve the custom - oauth app integration via :method:CustomAppIntegration/get.`, + oauth app integration via :method:CustomAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -232,10 +337,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service CustomAppIntegration diff --git a/cmd/account/encryption-keys/encryption-keys.go b/cmd/account/encryption-keys/encryption-keys.go index 0db4af80e..2172c49fc 100755 --- a/cmd/account/encryption-keys/encryption-keys.go +++ b/cmd/account/encryption-keys/encryption-keys.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "encryption-keys", - Short: `These APIs manage encryption key configurations for this workspace (optional).`, - Long: `These APIs manage encryption key configurations for this workspace (optional). +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "encryption-keys", + Short: `These APIs manage encryption key configurations for this workspace (optional).`, + Long: `These APIs manage encryption key configurations for this workspace (optional). A key configuration encapsulates the AWS KMS key information and some information about how the key configuration can be used. There are two possible uses for key configurations: @@ -31,29 +36,44 @@ var Cmd = &cobra.Command{ encryption requires that the workspace is on the E2 version of the platform. If you have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact your Databricks representative.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateCustomerManagedKeyRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *provisioning.CreateCustomerManagedKeyRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateCustomerManagedKeyRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_key_info // TODO: complex arg: gcp_key_info -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create encryption key configuration.`, - Long: `Create encryption key configuration. + cmd.Use = "create" + cmd.Short = `Create encryption key configuration.` + cmd.Long = `Create encryption key configuration. Creates a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to @@ -71,11 +91,12 @@ var createCmd = &cobra.Command{ This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -93,36 +114,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteEncryptionKeyRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteEncryptionKeyRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteEncryptionKeyRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CUSTOMER_MANAGED_KEY_ID", - Short: `Delete encryption key configuration.`, - Long: `Delete encryption key configuration. + cmd.Use = "delete CUSTOMER_MANAGED_KEY_ID" + cmd.Short = `Delete encryption key configuration.` + cmd.Long = `Delete encryption key configuration. Deletes a customer-managed key configuration object for an account. You cannot - delete a configuration that is associated with a running workspace.`, + delete a configuration that is associated with a running workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -133,25 +176,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetEncryptionKeyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetEncryptionKeyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetEncryptionKeyRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CUSTOMER_MANAGED_KEY_ID", - Short: `Get encryption key configuration.`, - Long: `Get encryption key configuration. + cmd.Use = "get CUSTOMER_MANAGED_KEY_ID" + cmd.Short = `Get encryption key configuration.` + cmd.Long = `Get encryption key configuration. Gets a customer-managed key configuration object for an account, specified by ID. This operation uploads a reference to a customer-managed key to @@ -167,15 +230,17 @@ var getCmd = &cobra.Command{ types, subscription types, and AWS regions. 
This operation is available only if your account is on the E2 version of the - platform.",`, + platform.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -186,23 +251,40 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all encryption key configurations.`, - Long: `Get all encryption key configurations. + cmd.Use = "list" + cmd.Short = `Get all encryption key configurations.` + cmd.Long = `Get all encryption key configurations. Gets all customer-managed key configuration objects for an account. 
If the key is specified as a workspace's managed services customer-managed key, @@ -216,11 +298,12 @@ var listCmd = &cobra.Command{ types, subscription types, and AWS regions. This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.EncryptionKeys.List(ctx) @@ -228,10 +311,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service EncryptionKeys diff --git a/cmd/account/encryption-keys/overrides.go b/cmd/account/encryption-keys/overrides.go index 9a27ac00d..906211750 100644 --- a/cmd/account/encryption-keys/overrides.go +++ b/cmd/account/encryption-keys/overrides.go @@ -1,9 +1,16 @@ package encryption_keys -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.CustomerManagedKeyId | green}} {{range .UseCases}}{{.}} {{end}} {{.AwsKeyInfo.KeyArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/groups.go b/cmd/account/groups.go index 7c9d70e3d..10a795b03 100644 --- a/cmd/account/groups.go +++ b/cmd/account/groups.go @@ -32,11 +32,3 @@ func Groups() []cobra.Group { }, } } - -func init() { - // Register groups with parent command - groups := Groups() - for i := range groups { - accountCmd.AddGroup(&groups[i]) - } -} diff --git a/cmd/account/groups/groups.go b/cmd/account/groups/groups.go index 55d0c7810..09594fa3a 100755 --- a/cmd/account/groups/groups.go +++ b/cmd/account/groups/groups.go @@ -3,8 +3,6 @@ package groups import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,59 +10,81 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "groups", - Short: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.`, - Long: `Groups simplify identity management, making it easier to assign access to +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups", + Short: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects. It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks account identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.Group -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.Group + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new group.`, - Long: `Create a new group. + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. 
Creates a group in the Databricks account with a unique name, using the - supplied group details.`, + supplied group details.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -81,51 +101,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountGroupRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountGroupRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a group.`, - Long: `Delete a group. + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. 
- Deletes a group from the Databricks account.`, + Deletes a group from the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." - names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks account") - } deleteReq.Id = args[0] err = a.Groups.Delete(ctx, deleteReq) @@ -133,51 +162,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountGroupRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountGroupRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get group details.`, - Long: `Get group details. + cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. - Gets the information for a specific group in the Databricks account.`, + Gets the information for a specific group in the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." - names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks account") - } getReq.Id = args[0] response, err := a.Groups.Get(ctx, getReq) @@ -185,48 +223,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountGroupsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountGroupsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The 
order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List group details.`, - Long: `List group details. + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. - Gets all details of the groups associated with the Databricks account.`, + Gets all details of the groups associated with the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -243,36 +303,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update group details.`, - Long: `Update group details. + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. - Partially updates the details of a group.`, + Partially updates the details of a group.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -282,23 +368,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." - names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks account") - } patchReq.Id = args[0] err = a.Groups.Patch(ctx, patchReq) @@ -306,42 +375,71 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.Group -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.Group + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a group.`, - Long: `Replace a group. + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. 
- Updates the details of a group by replacing the entire group entity.`, + Updates the details of a group by replacing the entire group entity.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -351,23 +449,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." - names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks group ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks group id") - } updateReq.Id = args[0] } @@ -376,10 +457,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountGroups diff --git a/cmd/account/groups/overrides.go b/cmd/account/groups/overrides.go index 28c91c4d2..37d05c64f 100644 --- a/cmd/account/groups/overrides.go +++ b/cmd/account/groups/overrides.go @@ -1,10 +1,18 @@ package groups -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListAccountGroupsRequest) { listReq.Attributes = "id,displayName" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.DisplayName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/ip-access-lists/ip-access-lists.go b/cmd/account/ip-access-lists/ip-access-lists.go index 7f43ff2a7..980dc7776 100755 --- a/cmd/account/ip-access-lists/ip-access-lists.go +++ b/cmd/account/ip-access-lists/ip-access-lists.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "ip-access-lists", - Short: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.`, - Long: `The Accounts IP Access List API enables account admins to configure IP access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "ip-access-lists", + Short: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console.`, + Long: `The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console. Account IP Access Lists affect web application access and REST API access to @@ -37,26 +42,41 @@ var Cmd = &cobra.Command{ After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateIpAccessList -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *settings.CreateIpAccessList, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateIpAccessList + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create access list.`, - Long: `Create access list. + cmd.Use = "create" + cmd.Short = `Create access list.` + cmd.Long = `Create access list. Creates an IP access list for the account. 
@@ -71,11 +91,12 @@ var createCmd = &cobra.Command{ * If the new list would block the calling user's current IP, error 400 is returned with error_code value INVALID_STATE. - It can take a few minutes for the changes to take effect.`, + It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -93,51 +114,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.DeleteAccountIpAccessListRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteAccountIpAccessListRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteAccountIpAccessListRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete IP_ACCESS_LIST_ID", - Short: `Delete access list.`, - Long: `Delete access list. + cmd.Use = "delete IP_ACCESS_LIST_ID" + cmd.Short = `Delete access list.` + cmd.Long = `Delete access list. 
- Deletes an IP access list, specified by its list ID.`, + Deletes an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Account Ip Access Lists drop-down." - names, err := a.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Ip Access Lists drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding ip access list") - } deleteReq.IpAccessListId = args[0] err = a.IpAccessLists.Delete(ctx, deleteReq) @@ -145,51 +175,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetAccountIpAccessListRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetAccountIpAccessListRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetAccountIpAccessListRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get IP_ACCESS_LIST_ID", - Short: `Get IP access list.`, - Long: `Get IP access list. + cmd.Use = "get IP_ACCESS_LIST_ID" + cmd.Short = `Get IP access list.` + cmd.Long = `Get IP access list. - Gets an IP access list, specified by its list ID.`, + Gets an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Account Ip Access Lists drop-down." - names, err := a.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding ip access list") - } getReq.IpAccessListId = args[0] response, err := a.IpAccessLists.Get(ctx, getReq) @@ -197,29 +236,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get access lists.`, - Long: `Get access lists. + cmd.Use = "list" + cmd.Short = `Get access lists.` + cmd.Long = `Get access lists. 
- Gets all IP access lists for the specified account.`, + Gets all IP access lists for the specified account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.IpAccessLists.ListAll(ctx) @@ -227,29 +284,49 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq settings.ReplaceIpAccessList -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var replaceOverrides []func( + *cobra.Command, + *settings.ReplaceIpAccessList, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq settings.ReplaceIpAccessList + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - replaceCmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace", - Short: `Replace access list.`, - Long: `Replace access list. + cmd.Use = "replace" + cmd.Short = `Replace access list.` + cmd.Long = `Replace access list. Replaces an IP access list, specified by its ID. @@ -260,11 +337,12 @@ var replaceCmd = &cobra.Command{ counts as a single value. Attempts to exceed that number return error 400 with error_code value QUOTA_EXCEEDED. * If the resulting list would block the calling user's current IP, error 400 is returned with error_code value - INVALID_STATE. It can take a few minutes for the changes to take effect.`, + INVALID_STATE. It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -282,29 +360,49 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // start update command -var updateReq settings.UpdateIpAccessList -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *settings.UpdateIpAccessList, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq settings.UpdateIpAccessList + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update access list.`, - Long: `Update access list. + cmd.Use = "update" + cmd.Short = `Update access list.` + cmd.Long = `Update access list. Updates an existing IP access list, specified by its ID. @@ -319,11 +417,12 @@ var updateCmd = &cobra.Command{ * If the updated list would block the calling user's current IP, error 400 is returned with error_code value INVALID_STATE. 
- It can take a few minutes for the changes to take effect.`, + It can take a few minutes for the changes to take effect.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -341,10 +440,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountIpAccessLists diff --git a/cmd/account/log-delivery/log-delivery.go b/cmd/account/log-delivery/log-delivery.go index d5ae87b1b..2018932ee 100755 --- a/cmd/account/log-delivery/log-delivery.go +++ b/cmd/account/log-delivery/log-delivery.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "log-delivery", - Short: `These APIs manage log delivery configurations for this account.`, - Long: `These APIs manage log delivery configurations for this account. The two +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "log-delivery", + Short: `These APIs manage log delivery configurations for this account.`, + Long: `These APIs manage log delivery configurations for this account. 
The two supported log types for this API are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all account ID types. @@ -75,28 +80,43 @@ var Cmd = &cobra.Command{ [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html`, - Annotations: map[string]string{ - "package": "billing", - }, + GroupID: "billing", + Annotations: map[string]string{ + "package": "billing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq billing.WrappedCreateLogDeliveryConfiguration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *billing.WrappedCreateLogDeliveryConfiguration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq billing.WrappedCreateLogDeliveryConfiguration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: log_delivery_configuration -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new log delivery configuration.`, - Long: `Create a new log delivery configuration. + cmd.Use = "create" + cmd.Short = `Create a new log delivery configuration.` + cmd.Long = `Create a new log delivery configuration. 
Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you @@ -123,18 +143,20 @@ var createCmd = &cobra.Command{ configuration](#operation/patch-log-delivery-config-status)). [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html - [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html`, + [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -151,52 +173,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start get command -var getReq billing.GetLogDeliveryRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *billing.GetLogDeliveryRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq billing.GetLogDeliveryRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get LOG_DELIVERY_CONFIGURATION_ID", - Short: `Get log delivery configuration.`, - Long: `Get log delivery configuration. + cmd.Use = "get LOG_DELIVERY_CONFIGURATION_ID" + cmd.Short = `Get log delivery configuration.` + cmd.Long = `Get log delivery configuration. Gets a Databricks log delivery configuration object for an account, both - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No LOG_DELIVERY_CONFIGURATION_ID argument specified. Loading names for Log Delivery drop-down." - names, err := a.LogDelivery.LogDeliveryConfigurationConfigNameToConfigIdMap(ctx, billing.ListLogDeliveryRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Log Delivery drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks log delivery configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks log delivery configuration id") - } getReq.LogDeliveryConfigurationId = args[0] response, err := a.LogDelivery.Get(ctx, getReq) @@ -204,45 +235,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq billing.ListLogDeliveryRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *billing.ListLogDeliveryRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq billing.ListLogDeliveryRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CredentialsId, "credentials-id", listReq.CredentialsId, `Filter by credential configuration ID.`) - listCmd.Flags().Var(&listReq.Status, "status", `Filter by status ENABLED or DISABLED.`) - listCmd.Flags().StringVar(&listReq.StorageConfigurationId, "storage-configuration-id", listReq.StorageConfigurationId, `Filter by storage configuration ID.`) + cmd.Flags().StringVar(&listReq.CredentialsId, "credentials-id", listReq.CredentialsId, `Filter by credential configuration ID.`) + cmd.Flags().Var(&listReq.Status, "status", `Filter by status ENABLED or DISABLED.`) + cmd.Flags().StringVar(&listReq.StorageConfigurationId, "storage-configuration-id", listReq.StorageConfigurationId, `Filter by storage configuration ID.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all log delivery configurations.`, - Long: `Get all log delivery configurations. + cmd.Use = "list" + cmd.Short = `Get all log delivery configurations.` + cmd.Long = `Get all log delivery configurations. 
Gets all Databricks log delivery configurations associated with an account - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -259,39 +312,61 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch-status command -var patchStatusReq billing.UpdateLogDeliveryConfigurationStatusRequest -func init() { - Cmd.AddCommand(patchStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchStatusOverrides []func( + *cobra.Command, + *billing.UpdateLogDeliveryConfigurationStatusRequest, +) + +func newPatchStatus() *cobra.Command { + cmd := &cobra.Command{} + + var patchStatusReq billing.UpdateLogDeliveryConfigurationStatusRequest + // TODO: short flags -} - -var patchStatusCmd = &cobra.Command{ - Use: "patch-status STATUS LOG_DELIVERY_CONFIGURATION_ID", - Short: `Enable or disable log delivery configuration.`, - Long: `Enable or disable log delivery configuration. + cmd.Use = "patch-status STATUS LOG_DELIVERY_CONFIGURATION_ID" + cmd.Short = `Enable or disable log delivery configuration.` + cmd.Long = `Enable or disable log delivery configuration. Enables or disables a log delivery configuration. Deletion of delivery configurations is not supported, so disable log delivery configurations that are no longer needed. Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under - [Create log delivery](#operation/create-log-delivery-config).`, + [Create log delivery](#operation/create-log-delivery-config).` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -306,10 +381,24 @@ var patchStatusCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range patchStatusOverrides { + fn(cmd, &patchStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatchStatus()) + }) } // end service LogDelivery diff --git a/cmd/account/metastore-assignments/metastore-assignments.go b/cmd/account/metastore-assignments/metastore-assignments.go index 673bb8f4c..8b571f1e5 100755 --- a/cmd/account/metastore-assignments/metastore-assignments.go +++ b/cmd/account/metastore-assignments/metastore-assignments.go @@ -12,43 +12,65 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastore-assignments", - Short: `These APIs manage metastore assignments to a workspace.`, - Long: `These APIs manage metastore assignments to a workspace.`, - Annotations: map[string]string{ - "package": "catalog", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastore-assignments", + Short: `These APIs manage metastore assignments to a workspace.`, + Long: `These APIs manage metastore assignments to a workspace.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateMetastoreAssignment -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateMetastoreAssignment, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateMetastoreAssignment + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_assignment -} - -var createCmd = &cobra.Command{ - Use: "create WORKSPACE_ID METASTORE_ID", - Short: `Assigns a workspace to a metastore.`, - Long: `Assigns a workspace to a metastore. + cmd.Use = "create WORKSPACE_ID METASTORE_ID" + cmd.Short = `Assigns a workspace to a metastore.` + cmd.Long = `Assigns a workspace to a metastore. Creates an assignment to a metastore for a workspace Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -69,37 +91,59 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountMetastoreAssignmentRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountMetastoreAssignmentRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountMetastoreAssignmentRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID METASTORE_ID", - Short: `Delete a metastore assignment.`, - Long: `Delete a metastore assignment. + cmd.Use = "delete WORKSPACE_ID METASTORE_ID" + cmd.Short = `Delete a metastore assignment.` + cmd.Long = `Delete a metastore assignment. Deletes a metastore assignment to a workspace, leaving the workspace with no metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API.`, + to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -114,39 +158,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountMetastoreAssignmentRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountMetastoreAssignmentRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountMetastoreAssignmentRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `Gets the metastore assignment for a workspace.`, - Long: `Gets the metastore assignment for a workspace. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `Gets the metastore assignment for a workspace.` + cmd.Long = `Gets the metastore assignment for a workspace. Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a 404 returned. 
Please add a header X-Databricks-Account-Console-API-Version: - 2.0 to access this API.`, + 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -160,37 +226,59 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListAccountMetastoreAssignmentsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListAccountMetastoreAssignmentsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListAccountMetastoreAssignmentsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `Get all workspaces assigned to a metastore.`, - Long: `Get all workspaces assigned to a metastore. 
+ cmd.Use = "list METASTORE_ID" + cmd.Short = `Get all workspaces assigned to a metastore.` + cmd.Long = `Get all workspaces assigned to a metastore. Gets a list of all Databricks workspace IDs that have been assigned to given metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API`, + to access this API` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -201,41 +289,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateMetastoreAssignment -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateMetastoreAssignment, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateMetastoreAssignment + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_assignment -} - -var updateCmd = &cobra.Command{ - Use: "update WORKSPACE_ID METASTORE_ID", - Short: `Updates a metastore assignment to a workspaces.`, - Long: `Updates a metastore assignment to a workspaces. + cmd.Use = "update WORKSPACE_ID METASTORE_ID" + cmd.Short = `Updates a metastore assignment to a workspaces.` + cmd.Long = `Updates a metastore assignment to a workspaces. Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be updated. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -256,10 +366,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountMetastoreAssignments diff --git a/cmd/account/metastores/metastores.go b/cmd/account/metastores/metastores.go index 89e1c8f2e..48c8a6b03 100755 --- a/cmd/account/metastores/metastores.go +++ b/cmd/account/metastores/metastores.go @@ -10,47 +10,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastores", - Short: `These APIs manage Unity Catalog metastores for an account.`, - Long: `These APIs manage Unity Catalog metastores for an account. A metastore +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastores", + Short: `These APIs manage Unity Catalog metastores for an account.`, + Long: `These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be associated with workspaces`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateMetastore -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateMetastore, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateMetastore + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_info -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create metastore.`, - Long: `Create metastore. + cmd.Use = "create" + cmd.Short = `Create metastore.` + cmd.Long = `Create metastore. Creates a Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -67,38 +89,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountMetastoreRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountMetastoreRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountMetastoreRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete METASTORE_ID", - Short: `Delete a metastore.`, - Long: `Delete a metastore. + cmd.Use = "delete METASTORE_ID" + cmd.Short = `Delete a metastore.` + cmd.Long = `Delete a metastore. Deletes a Unity Catalog metastore for an account, both specified by ID. 
Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -109,36 +153,58 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountMetastoreRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountMetastoreRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountMetastoreRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get METASTORE_ID", - Short: `Get a metastore.`, - Long: `Get a metastore. + cmd.Use = "get METASTORE_ID" + cmd.Short = `Get a metastore.` + cmd.Long = `Get a metastore. 
Gets a Unity Catalog metastore from an account, both specified by ID. Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -149,31 +215,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all metastores associated with an account.`, - Long: `Get all metastores associated with an account. + cmd.Use = "list" + cmd.Short = `Get all metastores associated with an account.` + cmd.Long = `Get all metastores associated with an account. 
Gets all Unity Catalog metastores associated with an account specified by ID. Please add a header X-Databricks-Account-Console-API-Version: 2.0 to access - this API.`, + this API.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Metastores.List(ctx) @@ -181,40 +265,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateMetastore -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateMetastore, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateMetastore + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: metastore_info -} - -var updateCmd = &cobra.Command{ - Use: "update METASTORE_ID", - Short: `Update a metastore.`, - Long: `Update a metastore. + cmd.Use = "update METASTORE_ID" + cmd.Short = `Update a metastore.` + cmd.Long = `Update a metastore. Updates an existing Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.`, + X-Databricks-Account-Console-API-Version: 2.0 to access this API.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -231,10 +337,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountMetastores diff --git a/cmd/account/networks/networks.go b/cmd/account/networks/networks.go index 331f0869a..36867cf25 100755 --- a/cmd/account/networks/networks.go +++ b/cmd/account/networks/networks.go @@ -3,8 +3,6 @@ package networks import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,52 +10,74 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "networks", - Short: `These APIs manage network configurations for customer-managed VPCs (optional).`, - Long: `These APIs manage network configurations for customer-managed VPCs (optional). +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "networks", + Short: `These APIs manage network configurations for customer-managed VPCs (optional).`, + Long: `These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used when creating a new workspace if you use customer-managed VPCs.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateNetworkRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateNetworkRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateNetworkRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: gcp_network_info // TODO: array: security_group_ids // TODO: array: subnet_ids // TODO: complex arg: vpc_endpoints - createCmd.Flags().StringVar(&createReq.VpcId, "vpc-id", createReq.VpcId, `The ID of the VPC associated with this network.`) + cmd.Flags().StringVar(&createReq.VpcId, "vpc-id", createReq.VpcId, `The ID of the VPC associated with this network.`) -} - -var createCmd = &cobra.Command{ - Use: "create NETWORK_NAME", - Short: `Create network configuration.`, - Long: `Create network configuration. + cmd.Use = "create NETWORK_NAME" + cmd.Short = `Create network configuration.` + cmd.Long = `Create network configuration. Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be used for new Databricks clusters. 
This requires a - pre-existing VPC and subnets.`, + pre-existing VPC and subnets.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -75,56 +95,65 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteNetworkRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteNetworkRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteNetworkRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NETWORK_ID", - Short: `Delete a network configuration.`, - Long: `Delete a network configuration. 
+ cmd.Use = "delete NETWORK_ID" + cmd.Short = `Delete a network configuration.` + cmd.Long = `Delete a network configuration. Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot delete a network that is associated with a workspace. This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NETWORK_ID argument specified. Loading names for Networks drop-down." - names, err := a.Networks.NetworkNetworkNameToNetworkIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Networks drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API network configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api network configuration id") - } deleteReq.NetworkId = args[0] err = a.Networks.Delete(ctx, deleteReq) @@ -132,52 +161,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetNetworkRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetNetworkRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetNetworkRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NETWORK_ID", - Short: `Get a network configuration.`, - Long: `Get a network configuration. + cmd.Use = "get NETWORK_ID" + cmd.Short = `Get a network configuration.` + cmd.Long = `Get a network configuration. Gets a Databricks network configuration, which represents a cloud VPC and its - resources.`, + resources.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NETWORK_ID argument specified. Loading names for Networks drop-down." - names, err := a.Networks.NetworkNetworkNameToNetworkIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Networks drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API network configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api network configuration id") - } getReq.NetworkId = args[0] response, err := a.Networks.Get(ctx, getReq) @@ -185,33 +223,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all network configurations.`, - Long: `Get all network configurations. + cmd.Use = "list" + cmd.Short = `Get all network configurations.` + cmd.Long = `Get all network configurations. Gets a list of all Databricks network configurations for an account, specified by ID. 
This operation is available only if your account is on the E2 version of the - platform.`, + platform.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Networks.List(ctx) @@ -219,10 +275,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Networks diff --git a/cmd/account/networks/overrides.go b/cmd/account/networks/overrides.go index d47b9ce38..082ee242d 100644 --- a/cmd/account/networks/overrides.go +++ b/cmd/account/networks/overrides.go @@ -1,9 +1,16 @@ package networks -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.NetworkId | green}} {{.NetworkName}} {{.WorkspaceId}} {{.VpcStatus}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/o-auth-enrollment/o-auth-enrollment.go b/cmd/account/o-auth-enrollment/o-auth-enrollment.go index a39306a37..91fdfa0a7 100755 --- a/cmd/account/o-auth-enrollment/o-auth-enrollment.go +++ b/cmd/account/o-auth-enrollment/o-auth-enrollment.go @@ -10,36 +10,56 @@ import ( 
"github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "o-auth-enrollment", - Short: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.`, - Long: `These APIs enable administrators to enroll OAuth for their accounts, which is +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "o-auth-enrollment", + Short: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration.`, + Long: `These APIs enable administrators to enroll OAuth for their accounts, which is required for adding/using any OAuth published/custom application integration. **Note:** Your account must be on the E2 version to use these APIs, this is because OAuth is only supported on the E2 version.`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateOAuthEnrollment -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *oauth2.CreateOAuthEnrollment, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateOAuthEnrollment + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.EnableAllPublishedApps, "enable-all-published-apps", createReq.EnableAllPublishedApps, `If true, enable OAuth for all the published applications in the account.`) + cmd.Flags().BoolVar(&createReq.EnableAllPublishedApps, "enable-all-published-apps", createReq.EnableAllPublishedApps, `If true, enable OAuth for all the published applications in the account.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create OAuth Enrollment request.`, - Long: `Create OAuth Enrollment request. + cmd.Use = "create" + cmd.Short = `Create OAuth Enrollment request.` + cmd.Long = `Create OAuth Enrollment request. Create an OAuth Enrollment request to enroll OAuth for this account and optionally enable the OAuth integration for all the partner applications in @@ -49,18 +69,20 @@ var createCmd = &cobra.Command{ The enrollment is executed asynchronously, so the API will return 204 immediately. 
The actual enrollment take a few minutes, you can check the - status via API :method:OAuthEnrollment/get.`, + status via API :method:OAuthEnrollment/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -77,32 +99,50 @@ var createCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start get command -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, +) -} +func newGet() *cobra.Command { + cmd := &cobra.Command{} -var getCmd = &cobra.Command{ - Use: "get", - Short: `Get OAuth enrollment status.`, - Long: `Get OAuth enrollment status. + cmd.Use = "get" + cmd.Short = `Get OAuth enrollment status.` + cmd.Long = `Get OAuth enrollment status. Gets the OAuth enrollment status for this Account. 
You can only add/use the OAuth published/custom application integrations when - OAuth enrollment status is enabled.`, + OAuth enrollment status is enabled.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.OAuthEnrollment.Get(ctx) @@ -110,10 +150,24 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // end service OAuthEnrollment diff --git a/cmd/account/private-access/private-access.go b/cmd/account/private-access/private-access.go index ebb31dd03..419886a80 100755 --- a/cmd/account/private-access/private-access.go +++ b/cmd/account/private-access/private-access.go @@ -3,8 +3,6 @@ package private_access import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,34 +10,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "private-access", - Short: `These APIs manage private access settings for this account.`, - Long: `These APIs manage private access settings for this account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "private-access", + Short: `These APIs manage private access settings for this account.`, + Long: `These APIs manage private access settings for this account.`, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.UpsertPrivateAccessSettingsRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.UpsertPrivateAccessSettingsRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.UpsertPrivateAccessSettingsRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - createCmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) - createCmd.Flags().BoolVar(&createReq.PublicAccessEnabled, "public-access-enabled", createReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) + cmd.Flags().Var(&createReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints 
can connect to the UI or API of any workspace that attaches this private access settings object.`) + cmd.Flags().BoolVar(&createReq.PublicAccessEnabled, "public-access-enabled", createReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) -} - -var createCmd = &cobra.Command{ - Use: "create PRIVATE_ACCESS_SETTINGS_NAME REGION", - Short: `Create private access settings.`, - Long: `Create private access settings. + cmd.Use = "create PRIVATE_ACCESS_SETTINGS_NAME REGION" + cmd.Short = `Create private access settings.` + cmd.Long = `Create private access settings. Creates a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must @@ -55,18 +73,20 @@ var createCmd = &cobra.Command{ PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -85,25 +105,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeletePrivateAccesRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeletePrivateAccesRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeletePrivateAccesRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete PRIVATE_ACCESS_SETTINGS_ID", - Short: `Delete a private access settings object.`, - Long: `Delete a private access settings object. + cmd.Use = "delete PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Delete a private access settings object.` + cmd.Long = `Delete a private access settings object. Deletes a private access settings object, which determines how your workspace is accessed over [AWS PrivateLink]. @@ -112,31 +152,20 @@ var deleteCmd = &cobra.Command{ PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PRIVATE_ACCESS_SETTINGS_ID argument specified. Loading names for Private Access drop-down." - names, err := a.PrivateAccess.PrivateAccessSettingsPrivateAccessSettingsNameToPrivateAccessSettingsIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Private Access drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API private access settings ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api private access settings id") - } deleteReq.PrivateAccessSettingsId = args[0] err = a.PrivateAccess.Delete(ctx, deleteReq) @@ -144,25 +173,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetPrivateAccesRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetPrivateAccesRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetPrivateAccesRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get PRIVATE_ACCESS_SETTINGS_ID", - Short: `Get a private access settings object.`, - Long: `Get a private access settings object. + cmd.Use = "get PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Get a private access settings object.` + cmd.Long = `Get a private access settings object. Gets a private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. @@ -171,31 +220,20 @@ var getCmd = &cobra.Command{ PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PRIVATE_ACCESS_SETTINGS_ID argument specified. Loading names for Private Access drop-down." - names, err := a.PrivateAccess.PrivateAccessSettingsPrivateAccessSettingsNameToPrivateAccessSettingsIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Private Access drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API private access settings ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api private access settings id") - } getReq.PrivateAccessSettingsId = args[0] response, err := a.PrivateAccess.Get(ctx, getReq) @@ -203,30 +241,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all private access settings objects.`, - Long: `Get all private access settings objects. + cmd.Use = "list" + cmd.Short = `Get all private access settings objects.` + cmd.Long = `Get all private access settings objects. Gets a list of all private access settings objects for an account, specified - by ID.`, + by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.PrivateAccess.List(ctx) @@ -234,31 +290,51 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq provisioning.UpsertPrivateAccessSettingsRequest -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var replaceOverrides []func( + *cobra.Command, + *provisioning.UpsertPrivateAccessSettingsRequest, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq provisioning.UpsertPrivateAccessSettingsRequest + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: allowed_vpc_endpoint_ids - replaceCmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) - replaceCmd.Flags().BoolVar(&replaceReq.PublicAccessEnabled, "public-access-enabled", replaceReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) + cmd.Flags().Var(&replaceReq.PrivateAccessLevel, "private-access-level", `The private access level controls which VPC endpoints can connect to the UI or API of any workspace that attaches this private access settings object.`) + cmd.Flags().BoolVar(&replaceReq.PublicAccessEnabled, "public-access-enabled", replaceReq.PublicAccessEnabled, `Determines if the workspace can be accessed over public internet.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace PRIVATE_ACCESS_SETTINGS_NAME REGION 
PRIVATE_ACCESS_SETTINGS_ID", - Short: `Replace private access settings.`, - Long: `Replace private access settings. + cmd.Use = "replace PRIVATE_ACCESS_SETTINGS_NAME REGION PRIVATE_ACCESS_SETTINGS_ID" + cmd.Short = `Replace private access settings.` + cmd.Long = `Replace private access settings. Updates an existing private access settings object, which specifies how your workspace is accessed over [AWS PrivateLink]. To use AWS PrivateLink, a @@ -280,15 +356,17 @@ var replaceCmd = &cobra.Command{ PrivateLink]. [AWS PrivateLink]: https://aws.amazon.com/privatelink - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -307,10 +385,24 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // end service PrivateAccess diff --git a/cmd/account/published-app-integration/published-app-integration.go b/cmd/account/published-app-integration/published-app-integration.go index 7eb6d4c9e..b367ad71a 100755 --- a/cmd/account/published-app-integration/published-app-integration.go +++ b/cmd/account/published-app-integration/published-app-integration.go @@ -10,55 +10,77 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "published-app-integration", - Short: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, - Long: `These APIs enable administrators to manage published oauth app integrations, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "published-app-integration", + Short: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.`, + Long: `These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. **Note:** You can only add/use the OAuth published application integrations when OAuth enrollment status is enabled. 
For more details see :method:OAuthEnrollment/create`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreatePublishedAppIntegration -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreatePublishedAppIntegration, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreatePublishedAppIntegration + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `app_id of the oauth published app integration.`) + cmd.Flags().StringVar(&createReq.AppId, "app-id", createReq.AppId, `app_id of the oauth published app integration.`) // TODO: complex arg: token_access_policy -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create Published OAuth App Integration.`, - Long: `Create Published OAuth App Integration. + cmd.Use = "create" + cmd.Short = `Create Published OAuth App Integration.` + cmd.Long = `Create Published OAuth App Integration. Create Published OAuth App Integration. 
You can retrieve the published oauth app integration via - :method:PublishedAppIntegration/get.`, + :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -75,36 +97,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeletePublishedAppIntegrationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeletePublishedAppIntegrationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeletePublishedAppIntegrationRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete INTEGRATION_ID", - Short: `Delete Published OAuth App Integration.`, - Long: `Delete Published OAuth App Integration. 
+ cmd.Use = "delete INTEGRATION_ID" + cmd.Short = `Delete Published OAuth App Integration.` + cmd.Long = `Delete Published OAuth App Integration. Delete an existing Published OAuth App Integration. You can retrieve the - published oauth app integration via :method:PublishedAppIntegration/get.`, + published oauth app integration via :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -115,35 +159,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq oauth2.GetPublishedAppIntegrationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *oauth2.GetPublishedAppIntegrationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq oauth2.GetPublishedAppIntegrationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INTEGRATION_ID", - Short: `Get OAuth Published App Integration.`, - Long: `Get OAuth Published App Integration. + cmd.Use = "get INTEGRATION_ID" + cmd.Short = `Get OAuth Published App Integration.` + cmd.Long = `Get OAuth Published App Integration. - Gets the Published OAuth App Integration for the given integration id.`, + Gets the Published OAuth App Integration for the given integration id.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -154,30 +220,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get published oauth app integrations.`, - Long: `Get published oauth app integrations. + cmd.Use = "list" + cmd.Short = `Get published oauth app integrations.` + cmd.Long = `Get published oauth app integrations. Get the list of published oauth app integrations for the specified Databricks - account`, + account` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.PublishedAppIntegration.ListAll(ctx) @@ -185,40 +269,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq oauth2.UpdatePublishedAppIntegration -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *oauth2.UpdatePublishedAppIntegration, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq oauth2.UpdatePublishedAppIntegration + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: token_access_policy -} - -var updateCmd = &cobra.Command{ - Use: "update INTEGRATION_ID", - Short: `Updates Published OAuth App Integration.`, - Long: `Updates Published OAuth App Integration. + cmd.Use = "update INTEGRATION_ID" + cmd.Short = `Updates Published OAuth App Integration.` + cmd.Long = `Updates Published OAuth App Integration. Updates an existing published OAuth App Integration. You can retrieve the - published oauth app integration via :method:PublishedAppIntegration/get.`, + published oauth app integration via :method:PublishedAppIntegration/get.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -235,10 +341,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service PublishedAppIntegration diff --git a/cmd/account/service-principal-secrets/service-principal-secrets.go b/cmd/account/service-principal-secrets/service-principal-secrets.go index 8c4c1fb95..a28f75faa 100755 --- a/cmd/account/service-principal-secrets/service-principal-secrets.go +++ b/cmd/account/service-principal-secrets/service-principal-secrets.go @@ -11,10 +11,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principal-secrets", - Short: `These APIs enable administrators to manage service principal secrets.`, - Long: `These APIs enable administrators to manage service principal secrets. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principal-secrets", + Short: `These APIs enable administrators to manage service principal secrets.`, + Long: `These APIs enable administrators to manage service principal secrets. You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be used to access Databricks Accounts and Workspace @@ -27,34 +32,51 @@ var Cmd = &cobra.Command{ [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal`, - Annotations: map[string]string{ - "package": "oauth2", - }, + GroupID: "oauth2", + Annotations: map[string]string{ + "package": "oauth2", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq oauth2.CreateServicePrincipalSecretRequest -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *oauth2.CreateServicePrincipalSecretRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq oauth2.CreateServicePrincipalSecretRequest + // TODO: short flags -} - -var createCmd = &cobra.Command{ - Use: "create SERVICE_PRINCIPAL_ID", - Short: `Create service principal secret.`, - Long: `Create service principal secret. + cmd.Use = "create SERVICE_PRINCIPAL_ID" + cmd.Short = `Create service principal secret.` + cmd.Long = `Create service principal secret. - Create a secret for the given service principal.`, + Create a secret for the given service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -68,35 +90,57 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq oauth2.DeleteServicePrincipalSecretRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *oauth2.DeleteServicePrincipalSecretRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq oauth2.DeleteServicePrincipalSecretRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete SERVICE_PRINCIPAL_ID SECRET_ID", - Short: `Delete service principal secret.`, - Long: `Delete service principal secret. + cmd.Use = "delete SERVICE_PRINCIPAL_ID SECRET_ID" + cmd.Short = `Delete service principal secret.` + cmd.Long = `Delete service principal secret. - Delete a secret from the given service principal.`, + Delete a secret from the given service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -111,37 +155,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -var listReq oauth2.ListServicePrincipalSecretsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *oauth2.ListServicePrincipalSecretsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq oauth2.ListServicePrincipalSecretsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list SERVICE_PRINCIPAL_ID", - Short: `List service principal secrets.`, - Long: `List service principal secrets. + cmd.Use = "list SERVICE_PRINCIPAL_ID" + cmd.Short = `List service principal secrets.` + cmd.Long = `List service principal secrets. List all secrets associated with the given service principal. 
This operation only returns information about the secrets themselves and does not include the - secret values.`, + secret values.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -155,10 +221,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service ServicePrincipalSecrets diff --git a/cmd/account/service-principals/overrides.go b/cmd/account/service-principals/overrides.go index c335bead6..d94a4267c 100644 --- a/cmd/account/service-principals/overrides.go +++ b/cmd/account/service-principals/overrides.go @@ -1,9 +1,17 @@ package service_principals -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *iam.ListAccountServicePrincipalsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.ApplicationId}} {{.DisplayName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/service-principals/service-principals.go b/cmd/account/service-principals/service-principals.go index 55b7492ff..7ab354337 100755 --- a/cmd/account/service-principals/service-principals.go +++ b/cmd/account/service-principals/service-principals.go @@ -3,8 +3,6 @@ package service_principals import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,57 +10,79 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principals", - Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, - Long: `Identities for use with jobs, automated tools, and systems such as scripts, +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.ServicePrincipal -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.ServicePrincipal + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a service principal.`, - Long: `Create a service principal. + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. 
- Creates a new service principal in the Databricks account.`, + Creates a new service principal in the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -79,51 +99,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountServicePrincipalRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountServicePrincipalRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountServicePrincipalRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a service principal.`, - Long: `Delete a service principal. 
+ cmd.Use = "delete ID" + cmd.Short = `Delete a service principal.` + cmd.Long = `Delete a service principal. - Delete a single service principal in the Databricks account.`, + Delete a single service principal in the Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." - names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks account") - } deleteReq.Id = args[0] err = a.ServicePrincipals.Delete(ctx, deleteReq) @@ -131,52 +160,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountServicePrincipalRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountServicePrincipalRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountServicePrincipalRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get service principal details.`, - Long: `Get service principal details. + cmd.Use = "get ID" + cmd.Short = `Get service principal details.` + cmd.Long = `Get service principal details. Gets the details for a single service principal define in the Databricks - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." - names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks account") - } getReq.Id = args[0] response, err := a.ServicePrincipals.Get(ctx, getReq) @@ -184,48 +222,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountServicePrincipalsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountServicePrincipalsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + 
cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List service principals.`, - Long: `List service principals. + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. - Gets the set of service principals associated with a Databricks account.`, + Gets the set of service principals associated with a Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -242,37 +302,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update service principal details.`, - Long: `Update service principal details. + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. Partially updates the details of a single service principal in the Databricks - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -282,23 +368,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." - names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks account") - } patchReq.Id = args[0] err = a.ServicePrincipals.Patch(ctx, patchReq) @@ -306,44 +375,73 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.ServicePrincipal -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.ServicePrincipal + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace service principal.`, - Long: `Replace service principal. + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. Updates the details of a single service principal. 
- This action replaces the existing service principal with the same name.`, + This action replaces the existing service principal with the same name.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -353,23 +451,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." - names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks service principal ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks service principal id") - } updateReq.Id = args[0] } @@ -378,10 +459,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountServicePrincipals diff --git a/cmd/account/settings/settings.go b/cmd/account/settings/settings.go index c55c7ad62..4e98119dd 100755 --- a/cmd/account/settings/settings.go +++ b/cmd/account/settings/settings.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "settings", - Short: `The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.`, - Long: `The Personal Compute enablement setting lets you control which users can use +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "settings", + Short: `The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources.`, + Long: `The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can change the setting to instead let individual workspaces configure access control (DELEGATE). @@ -22,37 +27,54 @@ var Cmd = &cobra.Command{ a default value, this setting is present on all accounts even though it's never set on a given account. Deletion reverts the value of the setting back to the default value.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, - // This service is being previewed; hide from help output. 
- Hidden: true, + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete-personal-compute-setting command -var deletePersonalComputeSettingReq settings.DeletePersonalComputeSettingRequest -func init() { - Cmd.AddCommand(deletePersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deletePersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.DeletePersonalComputeSettingRequest, +) + +func newDeletePersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var deletePersonalComputeSettingReq settings.DeletePersonalComputeSettingRequest + // TODO: short flags -} - -var deletePersonalComputeSettingCmd = &cobra.Command{ - Use: "delete-personal-compute-setting ETAG", - Short: `Delete Personal Compute setting.`, - Long: `Delete Personal Compute setting. + cmd.Use = "delete-personal-compute-setting ETAG" + cmd.Short = `Delete Personal Compute setting.` + cmd.Long = `Delete Personal Compute setting. 
- Reverts back the Personal Compute setting value to default (ON)`, + Reverts back the Personal Compute setting value to default (ON)` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,35 +85,57 @@ var deletePersonalComputeSettingCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deletePersonalComputeSettingOverrides { + fn(cmd, &deletePersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeletePersonalComputeSetting()) + }) } // start read-personal-compute-setting command -var readPersonalComputeSettingReq settings.ReadPersonalComputeSettingRequest -func init() { - Cmd.AddCommand(readPersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var readPersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.ReadPersonalComputeSettingRequest, +) + +func newReadPersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var readPersonalComputeSettingReq settings.ReadPersonalComputeSettingRequest + // TODO: short flags -} - -var readPersonalComputeSettingCmd = &cobra.Command{ - Use: "read-personal-compute-setting ETAG", - Short: `Get Personal Compute setting.`, - Long: `Get Personal Compute setting. + cmd.Use = "read-personal-compute-setting ETAG" + cmd.Short = `Get Personal Compute setting.` + cmd.Long = `Get Personal Compute setting. - Gets the value of the Personal Compute setting.`, + Gets the value of the Personal Compute setting.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -102,43 +146,65 @@ var readPersonalComputeSettingCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range readPersonalComputeSettingOverrides { + fn(cmd, &readPersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReadPersonalComputeSetting()) + }) } // start update-personal-compute-setting command -var updatePersonalComputeSettingReq settings.UpdatePersonalComputeSettingRequest -var updatePersonalComputeSettingJson flags.JsonFlag -func init() { - Cmd.AddCommand(updatePersonalComputeSettingCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePersonalComputeSettingOverrides []func( + *cobra.Command, + *settings.UpdatePersonalComputeSettingRequest, +) + +func newUpdatePersonalComputeSetting() *cobra.Command { + cmd := &cobra.Command{} + + var updatePersonalComputeSettingReq settings.UpdatePersonalComputeSettingRequest + var updatePersonalComputeSettingJson flags.JsonFlag + // TODO: short flags - updatePersonalComputeSettingCmd.Flags().Var(&updatePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updatePersonalComputeSettingJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updatePersonalComputeSettingCmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `This should always be set to true for Settings RPCs.`) + cmd.Flags().BoolVar(&updatePersonalComputeSettingReq.AllowMissing, "allow-missing", updatePersonalComputeSettingReq.AllowMissing, `This should always be set to true for Settings RPCs.`) // TODO: complex arg: setting -} - -var updatePersonalComputeSettingCmd = &cobra.Command{ - Use: "update-personal-compute-setting", - Short: `Update Personal Compute setting.`, - Long: `Update Personal Compute setting. 
+ cmd.Use = "update-personal-compute-setting" + cmd.Short = `Update Personal Compute setting.` + cmd.Long = `Update Personal Compute setting. - Updates the value of the Personal Compute setting.`, + Updates the value of the Personal Compute setting.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -155,10 +221,24 @@ var updatePersonalComputeSettingCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePersonalComputeSettingOverrides { + fn(cmd, &updatePersonalComputeSettingReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePersonalComputeSetting()) + }) } // end service AccountSettings diff --git a/cmd/account/storage-credentials/storage-credentials.go b/cmd/account/storage-credentials/storage-credentials.go index f5dd58200..451b71121 100755 --- a/cmd/account/storage-credentials/storage-credentials.go +++ b/cmd/account/storage-credentials/storage-credentials.go @@ -10,32 +10,52 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage-credentials", - Short: `These APIs manage storage credentials for a particular metastore.`, - Long: `These APIs manage storage credentials for a particular metastore.`, - Annotations: map[string]string{ - "package": "catalog", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage-credentials", + Short: `These APIs manage storage credentials for a particular metastore.`, + Long: `These APIs manage storage credentials for a particular metastore.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.AccountsCreateStorageCredential -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.AccountsCreateStorageCredential, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.AccountsCreateStorageCredential + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: credential_info -} - -var createCmd = &cobra.Command{ - Use: "create METASTORE_ID", - Short: `Create a storage credential.`, - Long: `Create a storage credential. + cmd.Use = "create METASTORE_ID" + cmd.Short = `Create a storage credential.` + cmd.Long = `Create a storage credential. Creates a new storage credential. The request object is specific to the cloud: @@ -43,15 +63,17 @@ var createCmd = &cobra.Command{ credentials * **GcpServiceAcountKey** for GCP credentials. The caller must be a metastore admin and have the - **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.`, + **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -68,38 +90,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteAccountStorageCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteAccountStorageCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteAccountStorageCredentialRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the Storage Credential is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the Storage Credential is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete METASTORE_ID NAME", - Short: `Delete a storage credential.`, - Long: `Delete a storage credential. + cmd.Use = "delete METASTORE_ID NAME" + cmd.Short = `Delete a storage credential.` + cmd.Long = `Delete a storage credential. Deletes a storage credential from the metastore. 
The caller must be an owner - of the storage credential.`, + of the storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -111,37 +155,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetAccountStorageCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetAccountStorageCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetAccountStorageCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get METASTORE_ID NAME", - Short: `Gets the named storage credential.`, - Long: `Gets the named storage credential. + cmd.Use = "get METASTORE_ID NAME" + cmd.Short = `Gets the named storage credential.` + cmd.Long = `Gets the named storage credential. 
Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the storage credential, or have a level of privilege on - the storage credential.`, + the storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -153,36 +219,58 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListAccountStorageCredentialsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *catalog.ListAccountStorageCredentialsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListAccountStorageCredentialsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `Get all storage credentials assigned to a metastore.`, - Long: `Get all storage credentials assigned to a metastore. + cmd.Use = "list METASTORE_ID" + cmd.Short = `Get all storage credentials assigned to a metastore.` + cmd.Long = `Get all storage credentials assigned to a metastore. Gets a list of all storage credentials that have been assigned to given - metastore.`, + metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -193,41 +281,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.AccountsUpdateStorageCredential -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.AccountsUpdateStorageCredential, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.AccountsUpdateStorageCredential + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: credential_info -} - -var updateCmd = &cobra.Command{ - Use: "update METASTORE_ID NAME", - Short: `Updates a storage credential.`, - Long: `Updates a storage credential. + cmd.Use = "update METASTORE_ID NAME" + cmd.Short = `Updates a storage credential.` + cmd.Long = `Updates a storage credential. Updates a storage credential on the metastore. The caller must be the owner of the storage credential. 
If the caller is a metastore admin, only the __owner__ - credential can be changed.`, + credential can be changed.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -245,10 +355,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountStorageCredentials diff --git a/cmd/account/storage/overrides.go b/cmd/account/storage/overrides.go index 76ca6ee1e..6ebe4a7a4 100644 --- a/cmd/account/storage/overrides.go +++ b/cmd/account/storage/overrides.go @@ -1,9 +1,16 @@ package storage -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.StorageConfigurationId | green}} {{.StorageConfigurationName}} {{.RootBucketInfo.BucketName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/storage/storage.go b/cmd/account/storage/storage.go index 54821d4d4..19240ccba 100755 --- a/cmd/account/storage/storage.go +++ b/cmd/account/storage/storage.go @@ -12,35 +12,55 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage", - Short: `These APIs manage storage configurations for this workspace.`, - Long: `These APIs manage storage configurations for this workspace. A root storage S3 +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage", + Short: `These APIs manage storage configurations for this workspace.`, + Long: `These APIs manage storage configurations for this workspace. A root storage S3 bucket in your account is required to store objects like cluster logs, notebook revisions, and job results. You can also use the root storage S3 bucket for storage of non-production DBFS data. 
A storage configuration encapsulates this bucket information, and its ID is used when creating a new workspace.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateStorageConfigurationRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateStorageConfigurationRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateStorageConfigurationRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create new storage configuration.`, - Long: `Create new storage configuration. + cmd.Use = "create" + cmd.Short = `Create new storage configuration.` + cmd.Long = `Create new storage configuration. Creates new storage configuration for an account, specified by ID. 
Uploads a storage configuration object that represents the root AWS S3 bucket in your @@ -51,11 +71,12 @@ var createCmd = &cobra.Command{ For information about how to create a new workspace with this API, see [Create a new workspace using the Account API] - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -73,52 +94,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteStorageRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteStorageRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteStorageRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete STORAGE_CONFIGURATION_ID", - Short: `Delete storage configuration.`, - Long: `Delete storage configuration. + cmd.Use = "delete STORAGE_CONFIGURATION_ID" + cmd.Short = `Delete storage configuration.` + cmd.Long = `Delete storage configuration. Deletes a Databricks storage configuration. You cannot delete a storage - configuration that is associated with any workspace.`, + configuration that is associated with any workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No STORAGE_CONFIGURATION_ID argument specified. Loading names for Storage drop-down." - names, err := a.Storage.StorageConfigurationStorageConfigurationNameToStorageConfigurationIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Storage drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API storage configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api storage configuration id") - } deleteReq.StorageConfigurationId = args[0] err = a.Storage.Delete(ctx, deleteReq) @@ -126,51 +156,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetStorageRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetStorageRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetStorageRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get STORAGE_CONFIGURATION_ID", - Short: `Get storage configuration.`, - Long: `Get storage configuration. + cmd.Use = "get STORAGE_CONFIGURATION_ID" + cmd.Short = `Get storage configuration.` + cmd.Long = `Get storage configuration. 
- Gets a Databricks storage configuration for an account, both specified by ID.`, + Gets a Databricks storage configuration for an account, both specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No STORAGE_CONFIGURATION_ID argument specified. Loading names for Storage drop-down." - names, err := a.Storage.StorageConfigurationStorageConfigurationNameToStorageConfigurationIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Storage drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks Account API storage configuration ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks account api storage configuration id") - } getReq.StorageConfigurationId = args[0] response, err := a.Storage.Get(ctx, getReq) @@ -178,30 +217,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all storage configurations.`, - Long: `Get all storage configurations. + cmd.Use = "list" + cmd.Short = `Get all storage configurations.` + cmd.Long = `Get all storage configurations. Gets a list of all Databricks storage configurations for your account, - specified by ID.`, + specified by ID.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Storage.List(ctx) @@ -209,10 +266,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Storage diff --git a/cmd/account/users/overrides.go b/cmd/account/users/overrides.go index 45447a0ae..ff9773345 100644 --- a/cmd/account/users/overrides.go +++ b/cmd/account/users/overrides.go @@ -1,10 +1,18 @@ package users -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListAccountUsersRequest) { listReq.Attributes = "id,userName,groups,active" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.UserName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/users/users.go b/cmd/account/users/users.go index 3c3edd0f5..117fe26ce 100755 --- a/cmd/account/users/users.go +++ b/cmd/account/users/users.go @@ -3,8 +3,6 @@ package users import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "users", - Short: `User identities recognized by Databricks and represented by email addresses.`, - Long: `User identities recognized by Databricks and represented by email addresses. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "users", + Short: `User identities recognized by Databricks and represented by email addresses.`, + Long: `User identities recognized by Databricks and represented by email addresses. Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks account. SCIM @@ -26,51 +29,68 @@ var Cmd = &cobra.Command{ provider and that user’s account will also be removed from Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.User -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.User, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.User + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - createCmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new user.`, - Long: `Create a new user. + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. Creates a new user in the Databricks account. 
This new user will also be added - to the Databricks account.`, + to the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -87,52 +107,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteAccountUserRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteAccountUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteAccountUserRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a user.`, - Long: `Delete a user. + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. Deletes a user. 
Deleting a user from a Databricks account also removes objects - associated with the user.`, + associated with the user.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." - names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks account") - } deleteReq.Id = args[0] err = a.Users.Delete(ctx, deleteReq) @@ -140,51 +169,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetAccountUserRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetAccountUserRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetAccountUserRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get user details.`, - Long: `Get user details. + cmd.Use = "get ID" + cmd.Short = `Get user details.` + cmd.Long = `Get user details. - Gets information for a specific user in Databricks account.`, + Gets information for a specific user in Databricks account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." - names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks account") - } getReq.Id = args[0] response, err := a.Users.Get(ctx, getReq) @@ -192,48 +230,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListAccountUsersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListAccountUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListAccountUsersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The 
order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List users.`, - Long: `List users. + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users. - Gets details for all the users associated with a Databricks account.`, + Gets details for all the users associated with a Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -250,37 +310,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update user details.`, - Long: `Update user details. + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. Partially updates a user resource by applying the supplied operations on - specific user attributes.`, + specific user attributes.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -290,23 +376,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." - names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks account") - } patchReq.Id = args[0] err = a.Users.Patch(ctx, patchReq) @@ -314,44 +383,73 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.User -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.User, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.User + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - updateCmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a user.`, - Long: `Replace a user. + cmd.Use = "update ID" + cmd.Short = `Replace a user.` + cmd.Long = `Replace a user. 
- Replaces a user's information with the data supplied in request.`, + Replaces a user's information with the data supplied in request.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -361,23 +459,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." - names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks user ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks user id") - } updateReq.Id = args[0] } @@ -386,10 +467,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service AccountUsers diff --git a/cmd/account/vpc-endpoints/vpc-endpoints.go b/cmd/account/vpc-endpoints/vpc-endpoints.go index 80ed3831e..d9c0f6664 100755 --- a/cmd/account/vpc-endpoints/vpc-endpoints.go +++ b/cmd/account/vpc-endpoints/vpc-endpoints.go @@ -3,8 +3,6 @@ package vpc_endpoints import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,34 +10,54 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "vpc-endpoints", - Short: `These APIs manage VPC endpoint configurations for this account.`, - Long: `These APIs manage VPC endpoint configurations for this account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "vpc-endpoints", + Short: `These APIs manage VPC endpoint configurations for this account.`, + Long: `These APIs manage VPC endpoint configurations for this account.`, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateVpcEndpointRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *provisioning.CreateVpcEndpointRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq provisioning.CreateVpcEndpointRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AwsVpcEndpointId, "aws-vpc-endpoint-id", createReq.AwsVpcEndpointId, `The ID of the VPC endpoint object in AWS.`) + cmd.Flags().StringVar(&createReq.AwsVpcEndpointId, "aws-vpc-endpoint-id", createReq.AwsVpcEndpointId, `The ID of the VPC endpoint object in AWS.`) // TODO: complex arg: gcp_vpc_endpoint_info - createCmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `The AWS region in which this VPC endpoint object exists.`) + cmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `The AWS region in which this VPC endpoint object exists.`) -} - -var createCmd = &cobra.Command{ - Use: "create VPC_ENDPOINT_NAME", - Short: `Create VPC endpoint configuration.`, - Long: `Create VPC endpoint configuration. + cmd.Use = "create VPC_ENDPOINT_NAME" + cmd.Short = `Create VPC endpoint configuration.` + cmd.Long = `Create VPC endpoint configuration. Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. 
@@ -53,18 +71,20 @@ var createCmd = &cobra.Command{ [AWS PrivateLink]: https://aws.amazon.com/privatelink [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html - [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html`, + [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -82,25 +102,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteVpcEndpointRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteVpcEndpointRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteVpcEndpointRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete VPC_ENDPOINT_ID", - Short: `Delete VPC endpoint configuration.`, - Long: `Delete VPC endpoint configuration. + cmd.Use = "delete VPC_ENDPOINT_ID" + cmd.Short = `Delete VPC endpoint configuration.` + cmd.Long = `Delete VPC endpoint configuration. Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate privately with Databricks over [AWS PrivateLink]. @@ -110,31 +150,20 @@ var deleteCmd = &cobra.Command{ [AWS PrivateLink]: https://aws.amazon.com/privatelink [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No VPC_ENDPOINT_ID argument specified. Loading names for Vpc Endpoints drop-down." - names, err := a.VpcEndpoints.VpcEndpointVpcEndpointNameToVpcEndpointIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Vpc Endpoints drop-down. 
Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks VPC endpoint ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks vpc endpoint id") - } deleteReq.VpcEndpointId = args[0] err = a.VpcEndpoints.Delete(ctx, deleteReq) @@ -142,55 +171,64 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetVpcEndpointRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetVpcEndpointRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetVpcEndpointRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get VPC_ENDPOINT_ID", - Short: `Get a VPC endpoint configuration.`, - Long: `Get a VPC endpoint configuration. + cmd.Use = "get VPC_ENDPOINT_ID" + cmd.Short = `Get a VPC endpoint configuration.` + cmd.Long = `Get a VPC endpoint configuration. Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate privately with Databricks over [AWS PrivateLink]. 
[AWS PrivateLink]: https://aws.amazon.com/privatelink - [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html`, + [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No VPC_ENDPOINT_ID argument specified. Loading names for Vpc Endpoints drop-down." - names, err := a.VpcEndpoints.VpcEndpointVpcEndpointNameToVpcEndpointIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Vpc Endpoints drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks VPC endpoint ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks vpc endpoint id") - } getReq.VpcEndpointId = args[0] response, err := a.VpcEndpoints.Get(ctx, getReq) @@ -198,34 +236,52 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all VPC endpoint configurations.`, - Long: `Get all VPC endpoint configurations. + cmd.Use = "list" + cmd.Short = `Get all VPC endpoint configurations.` + cmd.Long = `Get all VPC endpoint configurations. Gets a list of all VPC endpoints for an account, specified by ID. Before configuring PrivateLink, read the [Databricks article about PrivateLink]. - [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html`, + [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.VpcEndpoints.List(ctx) @@ -233,10 +289,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service VpcEndpoints diff --git a/cmd/account/workspace-assignment/workspace-assignment.go b/cmd/account/workspace-assignment/workspace-assignment.go index dab357122..9e8c14045 100755 --- a/cmd/account/workspace-assignment/workspace-assignment.go +++ b/cmd/account/workspace-assignment/workspace-assignment.go @@ -12,40 +12,62 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-assignment", - Short: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, - Long: `The Workspace Permission Assignment API allows you to manage workspace +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-assignment", + Short: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, + Long: `The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq iam.DeleteWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteWorkspaceAssignmentRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteWorkspaceAssignmentRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID PRINCIPAL_ID", - Short: `Delete permissions assignment.`, - Long: `Delete permissions assignment. + cmd.Use = "delete WORKSPACE_ID PRINCIPAL_ID" + cmd.Short = `Delete permissions assignment.` + cmd.Long = `Delete permissions assignment. Deletes the workspace permissions assignment in a given account and workspace - for the specified principal.`, + for the specified principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -63,35 +85,57 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetWorkspaceAssignmentRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetWorkspaceAssignmentRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `List workspace permissions.`, - Long: `List workspace permissions. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `List workspace permissions.` + cmd.Long = `List workspace permissions. - Get an array of workspace permissions for the specified account and workspace.`, + Get an array of workspace permissions for the specified account and workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -105,36 +149,58 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListWorkspaceAssignmentRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *iam.ListWorkspaceAssignmentRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListWorkspaceAssignmentRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list WORKSPACE_ID", - Short: `Get permission assignments.`, - Long: `Get permission assignments. + cmd.Use = "list WORKSPACE_ID" + cmd.Short = `Get permission assignments.` + cmd.Long = `Get permission assignments. Get the permission assignments for the specified Databricks account and - Databricks workspace.`, + Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -148,34 +214,55 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq iam.UpdateWorkspaceAssignments -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.UpdateWorkspaceAssignments, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.UpdateWorkspaceAssignments + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Create or update permissions assignment.`, - Long: `Create or update permissions assignment. + cmd.Use = "update" + cmd.Short = `Create or update permissions assignment.` + cmd.Long = `Create or update permissions assignment. Creates or updates the workspace permissions assignment in a given account and - workspace for the specified principal.`, + workspace for the specified principal.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -193,10 +280,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service WorkspaceAssignment diff --git a/cmd/account/workspaces/overrides.go b/cmd/account/workspaces/overrides.go index 458950242..283675c61 100644 --- a/cmd/account/workspaces/overrides.go +++ b/cmd/account/workspaces/overrides.go @@ -1,9 +1,16 @@ package workspaces -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{green "%d" .WorkspaceId}} {{.WorkspaceName}} {{.WorkspaceStatus}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/account/workspaces/workspaces.go b/cmd/account/workspaces/workspaces.go index 833d3cc00..9edf17994 100755 --- a/cmd/account/workspaces/workspaces.go +++ b/cmd/account/workspaces/workspaces.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspaces", - Short: `These APIs manage workspaces for this account.`, - Long: `These APIs manage workspaces for this account. A Databricks workspace is an +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspaces", + Short: `These APIs manage workspaces for this account.`, + Long: `These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into folders, and provides access to data and computational resources such as clusters and jobs. 
@@ -24,45 +29,59 @@ var Cmd = &cobra.Command{ These endpoints are available if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per account.`, - Annotations: map[string]string{ - "package": "provisioning", - }, + GroupID: "provisioning", + Annotations: map[string]string{ + "package": "provisioning", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq provisioning.CreateWorkspaceRequest -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *provisioning.CreateWorkspaceRequest, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq provisioning.CreateWorkspaceRequest + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.AwsRegion, "aws-region", createReq.AwsRegion, `The AWS region of the workspace's data plane.`) - 
createCmd.Flags().StringVar(&createReq.Cloud, "cloud", createReq.Cloud, `The cloud provider which the workspace uses.`) + cmd.Flags().StringVar(&createReq.AwsRegion, "aws-region", createReq.AwsRegion, `The AWS region of the workspace's data plane.`) + cmd.Flags().StringVar(&createReq.Cloud, "cloud", createReq.Cloud, `The cloud provider which the workspace uses.`) // TODO: complex arg: cloud_resource_container - createCmd.Flags().StringVar(&createReq.CredentialsId, "credentials-id", createReq.CredentialsId, `ID of the workspace's credential configuration object.`) - createCmd.Flags().StringVar(&createReq.DeploymentName, "deployment-name", createReq.DeploymentName, `The deployment name defines part of the subdomain for the workspace.`) - createCmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) - createCmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) - createCmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) - createCmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `The pricing tier of the workspace.`) - createCmd.Flags().StringVar(&createReq.PrivateAccessSettingsId, "private-access-settings-id", createReq.PrivateAccessSettingsId, `ID of the workspace's private access settings object.`) - createCmd.Flags().StringVar(&createReq.StorageConfigurationId, "storage-configuration-id", createReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) - createCmd.Flags().StringVar(&createReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", createReq.StorageCustomerManagedKeyId, `The ID of the workspace's storage encryption key configuration object.`) + cmd.Flags().StringVar(&createReq.CredentialsId, 
"credentials-id", createReq.CredentialsId, `ID of the workspace's credential configuration object.`) + cmd.Flags().StringVar(&createReq.DeploymentName, "deployment-name", createReq.DeploymentName, `The deployment name defines part of the subdomain for the workspace.`) + cmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) + cmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) + cmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) + cmd.Flags().Var(&createReq.PricingTier, "pricing-tier", `The pricing tier of the workspace.`) + cmd.Flags().StringVar(&createReq.PrivateAccessSettingsId, "private-access-settings-id", createReq.PrivateAccessSettingsId, `ID of the workspace's private access settings object.`) + cmd.Flags().StringVar(&createReq.StorageConfigurationId, "storage-configuration-id", createReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) + cmd.Flags().StringVar(&createReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", createReq.StorageCustomerManagedKeyId, `The ID of the workspace's storage encryption key configuration object.`) -} - -var createCmd = &cobra.Command{ - Use: "create WORKSPACE_NAME", - Short: `Create a new workspace.`, - Long: `Create a new workspace. + cmd.Use = "create WORKSPACE_NAME" + cmd.Short = `Create a new workspace.` + cmd.Long = `Create a new workspace. Creates a new workspace. @@ -72,18 +91,20 @@ var createCmd = &cobra.Command{ workspace status is typically PROVISIONING. Use the workspace ID (workspace_id) field in the response to identify the new workspace and make repeated GET requests with the workspace ID and check its status. 
The - workspace becomes available when the status changes to RUNNING.`, + workspace becomes available when the status changes to RUNNING.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) @@ -113,25 +134,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq provisioning.DeleteWorkspaceRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *provisioning.DeleteWorkspaceRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq provisioning.DeleteWorkspaceRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete WORKSPACE_ID", - Short: `Delete a workspace.`, - Long: `Delete a workspace. 
+ cmd.Use = "delete WORKSPACE_ID" + cmd.Short = `Delete a workspace.` + cmd.Long = `Delete a workspace. Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate. However, it might take a few minutes for all workspaces @@ -140,31 +181,20 @@ var deleteCmd = &cobra.Command{ This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." - names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Workspace ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have workspace id") - } _, err = fmt.Sscan(args[0], &deleteReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -175,25 +205,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq provisioning.GetWorkspaceRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *provisioning.GetWorkspaceRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq provisioning.GetWorkspaceRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get WORKSPACE_ID", - Short: `Get a workspace.`, - Long: `Get a workspace. + cmd.Use = "get WORKSPACE_ID" + cmd.Short = `Get a workspace.` + cmd.Long = `Get a workspace. Gets information including status for a Databricks workspace, specified by ID. In the response, the workspace_status field indicates the current status. @@ -208,31 +258,20 @@ var getCmd = &cobra.Command{ platform or on a select custom plan that allows multiple workspaces per account. 
- [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." - names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Workspace ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have workspace id") - } _, err = fmt.Sscan(args[0], &getReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -243,33 +282,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get all workspaces.`, - Long: `Get all workspaces. + cmd.Use = "list" + cmd.Short = `Get all workspaces.` + cmd.Long = `Get all workspaces. Gets a list of all workspaces associated with an account, specified by ID. This operation is available only if your account is on the E2 version of the platform or on a select custom plan that allows multiple workspaces per - account.`, + account.` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) response, err := a.Workspaces.List(ctx) @@ -277,38 +334,57 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq provisioning.UpdateWorkspaceRequest -var updateSkipWait bool -var updateTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *provisioning.UpdateWorkspaceRequest, +) -func init() { - Cmd.AddCommand(updateCmd) +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} - updateCmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`) - updateCmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var updateReq provisioning.UpdateWorkspaceRequest + + var updateSkipWait bool + var updateTimeout time.Duration + + cmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.AwsRegion, "aws-region", updateReq.AwsRegion, `The AWS region of the workspace's data plane (for example, us-west-2).`) - updateCmd.Flags().StringVar(&updateReq.CredentialsId, "credentials-id", updateReq.CredentialsId, `ID of the workspace's credential configuration object.`) - updateCmd.Flags().StringVar(&updateReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", updateReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) - updateCmd.Flags().StringVar(&updateReq.NetworkId, "network-id", updateReq.NetworkId, `The ID of the workspace's network configuration object.`) - 
updateCmd.Flags().StringVar(&updateReq.StorageConfigurationId, "storage-configuration-id", updateReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) - updateCmd.Flags().StringVar(&updateReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", updateReq.StorageCustomerManagedKeyId, `The ID of the key configuration object for workspace storage.`) + cmd.Flags().StringVar(&updateReq.AwsRegion, "aws-region", updateReq.AwsRegion, `The AWS region of the workspace's data plane (for example, us-west-2).`) + cmd.Flags().StringVar(&updateReq.CredentialsId, "credentials-id", updateReq.CredentialsId, `ID of the workspace's credential configuration object.`) + cmd.Flags().StringVar(&updateReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", updateReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) + cmd.Flags().StringVar(&updateReq.NetworkId, "network-id", updateReq.NetworkId, `The ID of the workspace's network configuration object.`) + cmd.Flags().StringVar(&updateReq.StorageConfigurationId, "storage-configuration-id", updateReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) + cmd.Flags().StringVar(&updateReq.StorageCustomerManagedKeyId, "storage-customer-managed-key-id", updateReq.StorageCustomerManagedKeyId, `The ID of the key configuration object for workspace storage.`) -} - -var updateCmd = &cobra.Command{ - Use: "update WORKSPACE_ID", - Short: `Update workspace configuration.`, - Long: `Update workspace configuration. + cmd.Use = "update WORKSPACE_ID" + cmd.Short = `Update workspace configuration.` + cmd.Long = `Update workspace configuration. Updates a workspace configuration for either a running workspace or a failed workspace. The elements that can be updated varies between these two use @@ -420,31 +496,20 @@ var updateCmd = &cobra.Command{ account. 
[Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html - [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html`, + [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html` - Annotations: map[string]string{}, - PreRunE: root.MustAccountClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." - names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Workspace ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have workspace id") - } _, err = fmt.Sscan(args[0], &updateReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -467,10 +532,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Workspaces diff --git a/cmd/cmd.go b/cmd/cmd.go new file mode 100644 index 000000000..69502d509 --- /dev/null +++ b/cmd/cmd.go @@ -0,0 +1,40 @@ +package cmd + +import ( + "sync" + + "github.com/databricks/cli/cmd/account" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd/workspace" + "github.com/spf13/cobra" +) + +var once sync.Once +var cmd *cobra.Command + +func New() *cobra.Command { + // TODO: this command is still a global. + // Once the non-generated commands are all instantiatable, + // we can remove the global and instantiate this as well. + once.Do(func() { + cli := root.RootCmd + + // Add account subcommand. + cli.AddCommand(account.New()) + + // Add workspace subcommands. + for _, cmd := range workspace.All() { + cli.AddCommand(cmd) + } + + // Add workspace command groups. + groups := workspace.Groups() + for i := range groups { + cli.AddGroup(&groups[i]) + } + + cmd = cli + }) + + return cmd +} diff --git a/cmd/root/root.go b/cmd/root/root.go index 3b940a491..663dd645f 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -73,12 +73,12 @@ func flagErrorFunc(c *cobra.Command, err error) error { // Execute adds all child commands to the root command and sets flags appropriately. // This is called by main.main(). It only needs to happen once to the rootCmd. 
-func Execute() { +func Execute(cmd *cobra.Command) { // TODO: deferred panic recovery ctx := context.Background() // Run the command - cmd, err := RootCmd.ExecuteContextC(ctx) + cmd, err := cmd.ExecuteContextC(ctx) if err != nil { // If cmdio logger initialization succeeds, then this function logs with the // initialized cmdio logger, otherwise with the default cmdio logger diff --git a/cmd/workspace/alerts/alerts.go b/cmd/workspace/alerts/alerts.go index e13f72849..b96e240a6 100755 --- a/cmd/workspace/alerts/alerts.go +++ b/cmd/workspace/alerts/alerts.go @@ -12,45 +12,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "alerts", - Short: `The alerts API can be used to perform CRUD operations on alerts.`, - Long: `The alerts API can be used to perform CRUD operations on alerts. An alert is a +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "alerts", + Short: `The alerts API can be used to perform CRUD operations on alerts.`, + Long: `The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the sql_task type of the Jobs API, e.g. :method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateAlert -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateAlert, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateAlert + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) - createCmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) + cmd.Flags().StringVar(&createReq.Parent, "parent", createReq.Parent, `The identifier of the workspace folder containing the object.`) + cmd.Flags().IntVar(&createReq.Rearm, "rearm", createReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create an alert.`, - Long: `Create an alert. + cmd.Use = "create" + cmd.Short = `Create an alert.` + cmd.Long = `Create an alert. Creates an alert. 
An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification - destinations if the condition was met.`, + destinations if the condition was met.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -68,53 +89,62 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteAlertRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteAlertRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteAlertRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ALERT_ID", - Short: `Delete an alert.`, - Long: `Delete an alert. + cmd.Use = "delete ALERT_ID" + cmd.Short = `Delete an alert.` + cmd.Long = `Delete an alert. Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. 
**Note:** Unlike queries and dashboards, alerts cannot be moved to - the trash.`, + the trash.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down." - names, err := w.Alerts.AlertNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.AlertId = args[0] err = w.Alerts.Delete(ctx, deleteReq) @@ -122,51 +152,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetAlertRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *sql.GetAlertRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetAlertRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ALERT_ID", - Short: `Get an alert.`, - Long: `Get an alert. + cmd.Use = "get ALERT_ID" + cmd.Short = `Get an alert.` + cmd.Long = `Get an alert. - Gets an alert.`, + Gets an alert.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down." - names, err := w.Alerts.AlertNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.AlertId = args[0] response, err := w.Alerts.Get(ctx, getReq) @@ -174,29 +213,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get alerts.`, - Long: `Get alerts. + cmd.Use = "list" + cmd.Short = `Get alerts.` + cmd.Long = `Get alerts. - Gets a list of alerts.`, + Gets a list of alerts.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Alerts.List(ctx) @@ -204,35 +261,56 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq sql.EditAlert -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sql.EditAlert, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.EditAlert + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) + cmd.Flags().IntVar(&updateReq.Rearm, "rearm", updateReq.Rearm, `Number of seconds after being triggered before the alert rearms itself and can be triggered again.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update an alert.`, - Long: `Update an alert. + cmd.Use = "update" + cmd.Short = `Update an alert.` + cmd.Long = `Update an alert. - Updates an alert.`, + Updates an alert.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -250,10 +328,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Alerts diff --git a/cmd/workspace/catalogs/catalogs.go b/cmd/workspace/catalogs/catalogs.go index 78b8b1499..2c520e4da 100755 --- a/cmd/workspace/catalogs/catalogs.go +++ b/cmd/workspace/catalogs/catalogs.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "catalogs", - Short: `A catalog is the first layer of Unity Catalog’s three-level namespace.`, - Long: `A catalog is the first layer of Unity Catalog’s three-level namespace. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "catalogs", + Short: `A catalog is the first layer of Unity Catalog’s three-level namespace.`, + Long: `A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission. @@ -21,46 +26,63 @@ var Cmd = &cobra.Command{ data centrally across all of the workspaces in a Databricks account. Users in different workspaces can share access to the same data, depending on privileges granted centrally in Unity Catalog.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateCatalog -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateCatalog, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateCatalog + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) - createCmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) - createCmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within catalog.`) + cmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) + cmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) + cmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within catalog.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a catalog.`, - Long: `Create a catalog. + cmd.Use = "create NAME" + cmd.Short = `Create a catalog.` + cmd.Long = `Create a catalog. 
Creates a new catalog instance in the parent metastore if the caller is a - metastore admin or has the **CREATE_CATALOG** privilege.`, + metastore admin or has the **CREATE_CATALOG** privilege.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,38 +100,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteCatalogRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteCatalogRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteCatalogRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the catalog is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the catalog is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a catalog.`, - Long: `Delete a catalog. + cmd.Use = "delete NAME" + cmd.Short = `Delete a catalog.` + cmd.Long = `Delete a catalog. Deletes the catalog that matches the supplied name. The caller must be a - metastore admin or the owner of the catalog.`, + metastore admin or the owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -120,37 +164,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetCatalogRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetCatalogRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetCatalogRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a catalog.`, - Long: `Get a catalog. + cmd.Use = "get NAME" + cmd.Short = `Get a catalog.` + cmd.Long = `Get a catalog. Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the catalog, or a user that has the **USE_CATALOG** - privilege set for their account.`, + privilege set for their account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -161,33 +227,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List catalogs.`, - Long: `List catalogs. + cmd.Use = "list" + cmd.Short = `List catalogs.` + cmd.Long = `List catalogs. Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the elements in the - array.`, + array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Catalogs.ListAll(ctx) @@ -195,48 +279,70 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateCatalog -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateCatalog, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateCatalog + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of catalog.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of catalog.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of catalog.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of catalog.`) // TODO: map via StringToStringVar: properties -} - -var updateCmd = &cobra.Command{ - 
Use: "update NAME", - Short: `Update a catalog.`, - Long: `Update a catalog. + cmd.Use = "update NAME" + cmd.Short = `Update a catalog.` + cmd.Long = `Update a catalog. Updates the catalog that matches the supplied name. The caller must be either the owner of the catalog, or a metastore admin (when changing the owner field - of the catalog).`, + of the catalog).` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -254,10 +360,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Catalogs diff --git a/cmd/workspace/catalogs/overrides.go b/cmd/workspace/catalogs/overrides.go index 5b8cffea9..6de7a7771 100644 --- a/cmd/workspace/catalogs/overrides.go +++ b/cmd/workspace/catalogs/overrides.go @@ -1,10 +1,17 @@ package catalogs -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Name"}} {{header "Type"}} {{header "Comment"}} {{range .}}{{.Name|green}} {{blue "%s" .CatalogType}} {{.Comment}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/clean-rooms/clean-rooms.go b/cmd/workspace/clean-rooms/clean-rooms.go index 328ed3e73..5aa704fa4 100755 --- a/cmd/workspace/clean-rooms/clean-rooms.go +++ b/cmd/workspace/clean-rooms/clean-rooms.go @@ -12,47 +12,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "clean-rooms", - Short: `A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases.`, - Long: `A clean room is a secure, privacy-protecting environment where two or more +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clean-rooms", + Short: `A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases.`, + Long: `A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases. To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** privilege.`, - Annotations: map[string]string{ - "package": "sharing", - }, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, - // This service is being previewed; hide from help output. - Hidden: true, + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateCleanRoom -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sharing.CreateCleanRoom, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateCleanRoom + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a clean room.`, - Long: `Create a clean room. + cmd.Use = "create" + cmd.Short = `Create a clean room.` + cmd.Long = `Create a clean room. Creates a new clean room with specified colaborators. The caller must be a - metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.`, + metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,36 +91,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteCleanRoomRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteCleanRoomRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteCleanRoomRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME_ARG", - Short: `Delete a clean room.`, - Long: `Delete a clean room. + cmd.Use = "delete NAME_ARG" + cmd.Short = `Delete a clean room.` + cmd.Long = `Delete a clean room. Deletes a data object clean room from the metastore. The caller must be an - owner of the clean room.`, + owner of the clean room.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -110,38 +153,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetCleanRoomRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetCleanRoomRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetCleanRoomRequest + // TODO: short flags - getCmd.Flags().BoolVar(&getReq.IncludeRemoteDetails, "include-remote-details", getReq.IncludeRemoteDetails, `Whether to include remote details (central) on the clean room.`) + cmd.Flags().BoolVar(&getReq.IncludeRemoteDetails, "include-remote-details", getReq.IncludeRemoteDetails, `Whether to include remote details (central) on the clean room.`) -} - -var getCmd = &cobra.Command{ - Use: "get NAME_ARG", - Short: `Get a clean room.`, - Long: `Get a clean room. + cmd.Use = "get NAME_ARG" + cmd.Short = `Get a clean room.` + cmd.Long = `Get a clean room. Gets a data object clean room from the metastore. 
The caller must be a - metastore admin or the owner of the clean room.`, + metastore admin or the owner of the clean room.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -152,31 +217,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List clean rooms.`, - Long: `List clean rooms. + cmd.Use = "list" + cmd.Short = `List clean rooms.` + cmd.Long = `List clean rooms. Gets an array of data object clean rooms from the metastore. The caller must be a metastore admin or the owner of the clean room. 
There is no guarantee of - a specific ordering of the elements in the array.`, + a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.CleanRooms.ListAll(ctx) @@ -184,32 +267,52 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq sharing.UpdateCleanRoom -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sharing.UpdateCleanRoom, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateCleanRoom + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: catalog_updates - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the clean room.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of clean room.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the clean room.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of clean room.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME_ARG", - Short: `Update a clean room.`, - Long: `Update a clean room. + cmd.Use = "update NAME_ARG" + cmd.Short = `Update a clean room.` + cmd.Long = `Update a clean room. Updates the clean room with the changes and data objects in the request. The caller must be the owner of the clean room or a metastore admin. @@ -224,15 +327,17 @@ var updateCmd = &cobra.Command{ indefinitely for recipients to be able to access the table. Typically, you should use a group as the clean room owner. 
- Table removals through **update** do not require additional privileges.`, + Table removals through **update** do not require additional privileges.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -249,10 +354,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service CleanRooms diff --git a/cmd/workspace/cluster-policies/cluster-policies.go b/cmd/workspace/cluster-policies/cluster-policies.go index 1f1d36c1d..c4f00e52b 100755 --- a/cmd/workspace/cluster-policies/cluster-policies.go +++ b/cmd/workspace/cluster-policies/cluster-policies.go @@ -3,8 +3,6 @@ package cluster_policies import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "cluster-policies", - Short: `Cluster policy limits the ability to configure clusters based on a set of rules.`, - Long: `Cluster policy limits the ability to configure clusters based on a set of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "cluster-policies", + Short: `Cluster policy limits the ability to configure clusters based on a set of rules.`, + Long: `Cluster policy limits the ability to configure clusters based on a set of rules. The policy rules limit the attributes or attribute values available for cluster creation. Cluster policies have ACLs that limit their use to specific users and groups. @@ -39,45 +42,62 @@ var Cmd = &cobra.Command{ Only admin users can create, edit, and delete policies. Admin users also have access to all policies.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.CreatePolicy -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *compute.CreatePolicy, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.CreatePolicy + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Definition, "definition", createReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) - createCmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `Additional human-readable description of the cluster policy.`) - createCmd.Flags().Int64Var(&createReq.MaxClustersPerUser, "max-clusters-per-user", createReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) - createCmd.Flags().StringVar(&createReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", createReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) - createCmd.Flags().StringVar(&createReq.PolicyFamilyId, "policy-family-id", createReq.PolicyFamilyId, `ID of the policy family.`) + cmd.Flags().StringVar(&createReq.Definition, "definition", createReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) + cmd.Flags().StringVar(&createReq.Description, "description", createReq.Description, `Additional human-readable 
description of the cluster policy.`) + cmd.Flags().Int64Var(&createReq.MaxClustersPerUser, "max-clusters-per-user", createReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) + cmd.Flags().StringVar(&createReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", createReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) + cmd.Flags().StringVar(&createReq.PolicyFamilyId, "policy-family-id", createReq.PolicyFamilyId, `ID of the policy family.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a new policy.`, - Long: `Create a new policy. + cmd.Use = "create NAME" + cmd.Short = `Create a new policy.` + cmd.Long = `Create a new policy. - Creates a new policy with prescribed settings.`, + Creates a new policy with prescribed settings.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,34 +115,63 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeletePolicy -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeletePolicy, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeletePolicy + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete POLICY_ID", - Short: `Delete a cluster policy.`, - Long: `Delete a cluster policy. + cmd.Use = "delete POLICY_ID" + cmd.Short = `Delete a cluster policy.` + cmd.Long = `Delete a cluster policy. Delete a policy for a cluster. 
Clusters governed by this policy can still run, - but cannot be edited.`, + but cannot be edited.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -132,23 +181,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No POLICY_ID argument specified. Loading names for Cluster Policies drop-down." - names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the policy to delete") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the policy to delete") - } deleteReq.PolicyId = args[0] } @@ -157,47 +189,69 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditPolicy -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.EditPolicy, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.EditPolicy + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().StringVar(&editReq.Definition, "definition", editReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) - editCmd.Flags().StringVar(&editReq.Description, "description", editReq.Description, `Additional human-readable description of the cluster policy.`) - editCmd.Flags().Int64Var(&editReq.MaxClustersPerUser, "max-clusters-per-user", editReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) - editCmd.Flags().StringVar(&editReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", editReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) - editCmd.Flags().StringVar(&editReq.PolicyFamilyId, "policy-family-id", editReq.PolicyFamilyId, `ID of the policy family.`) + cmd.Flags().StringVar(&editReq.Definition, "definition", editReq.Definition, `Policy definition document expressed in Databricks Cluster Policy Definition Language.`) + 
cmd.Flags().StringVar(&editReq.Description, "description", editReq.Description, `Additional human-readable description of the cluster policy.`) + cmd.Flags().Int64Var(&editReq.MaxClustersPerUser, "max-clusters-per-user", editReq.MaxClustersPerUser, `Max number of clusters per user that can be active using this policy.`) + cmd.Flags().StringVar(&editReq.PolicyFamilyDefinitionOverrides, "policy-family-definition-overrides", editReq.PolicyFamilyDefinitionOverrides, `Policy definition JSON document expressed in Databricks Policy Definition Language.`) + cmd.Flags().StringVar(&editReq.PolicyFamilyId, "policy-family-id", editReq.PolicyFamilyId, `ID of the policy family.`) -} - -var editCmd = &cobra.Command{ - Use: "edit POLICY_ID NAME", - Short: `Update a cluster policy.`, - Long: `Update a cluster policy. + cmd.Use = "edit POLICY_ID NAME" + cmd.Short = `Update a cluster policy.` + cmd.Long = `Update a cluster policy. Update an existing policy for cluster. This operation may make some clusters - governed by the previous policy invalid.`, + governed by the previous policy invalid.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -216,51 +270,60 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq compute.GetClusterPolicyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetClusterPolicyRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get POLICY_ID", - Short: `Get entity.`, - Long: `Get entity. + cmd.Use = "get POLICY_ID" + cmd.Short = `Get entity.` + cmd.Long = `Get entity. - Get a cluster policy entity. Creation and editing is available to admins only.`, + Get a cluster policy entity. Creation and editing is available to admins only.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No POLICY_ID argument specified. Loading names for Cluster Policies drop-down." 
- names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Canonical unique identifier for the cluster policy") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have canonical unique identifier for the cluster policy") - } getReq.PolicyId = args[0] response, err := w.ClusterPolicies.Get(ctx, getReq) @@ -268,43 +331,65 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq compute.ListClusterPoliciesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *compute.ListClusterPoliciesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListClusterPoliciesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Var(&listReq.SortColumn, "sort-column", `The cluster policy attribute to sort by.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order in which the policies get listed.`) + cmd.Flags().Var(&listReq.SortColumn, "sort-column", `The cluster policy attribute to sort by.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order in which the policies get listed.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a cluster policy.`, - Long: `Get a cluster policy. + cmd.Use = "list" + cmd.Short = `Get a cluster policy.` + cmd.Long = `Get a cluster policy. - Returns a list of policies accessible by the requesting user.`, + Returns a list of policies accessible by the requesting user.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -321,10 +406,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service ClusterPolicies diff --git a/cmd/workspace/cluster-policies/overrides.go b/cmd/workspace/cluster-policies/overrides.go index dea5c6bfc..9278b29c3 100644 --- a/cmd/workspace/cluster-policies/overrides.go +++ b/cmd/workspace/cluster-policies/overrides.go @@ -1,11 +1,22 @@ package cluster_policies -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *compute.ListClusterPoliciesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.PolicyId | green}} {{.Name}} {{end}}`) +} +func getOverride(getCmd *cobra.Command, _ *compute.GetClusterPolicyRequest) { getCmd.Annotations["template"] = cmdio.Heredoc(`{{.Definition | pretty_json}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) + getOverrides = append(getOverrides, getOverride) +} diff --git a/cmd/workspace/clusters/clusters.go b/cmd/workspace/clusters/clusters.go index bc891eef7..432fbff4d 100755 --- a/cmd/workspace/clusters/clusters.go +++ b/cmd/workspace/clusters/clusters.go @@ -3,7 +3,6 @@ package clusters import ( - "fmt" "time" "github.com/databricks/cli/cmd/root" @@ -13,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "clusters", - Short: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.`, - Long: `The Clusters API allows you to create, start, edit, list, terminate, and +// Slice with 
functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "clusters", + Short: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.`, + Long: `The Clusters API allows you to create, start, edit, list, terminate, and delete clusters. Databricks maps cluster node instance types to compute units known as DBUs. @@ -43,40 +47,57 @@ var Cmd = &cobra.Command{ recently terminated by the job scheduler. To keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start change-owner command -var changeOwnerReq compute.ChangeClusterOwner -var changeOwnerJson flags.JsonFlag -func init() { - Cmd.AddCommand(changeOwnerCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var changeOwnerOverrides []func( + *cobra.Command, + *compute.ChangeClusterOwner, +) + +func newChangeOwner() *cobra.Command { + cmd := &cobra.Command{} + + var changeOwnerReq compute.ChangeClusterOwner + var changeOwnerJson flags.JsonFlag + // TODO: short flags - changeOwnerCmd.Flags().Var(&changeOwnerJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&changeOwnerJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var changeOwnerCmd = &cobra.Command{ - Use: "change-owner CLUSTER_ID OWNER_USERNAME", - Short: `Change cluster owner.`, - Long: `Change cluster owner. + cmd.Use = "change-owner CLUSTER_ID OWNER_USERNAME" + cmd.Short = `Change cluster owner.` + cmd.Long = `Change cluster owner. Change the owner of the cluster. You must be an admin to perform this - operation.`, + operation.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,58 +116,77 @@ var changeOwnerCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range changeOwnerOverrides { + fn(cmd, &changeOwnerReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newChangeOwner()) + }) } // start create command -var createReq compute.CreateCluster -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *compute.CreateCluster, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq compute.CreateCluster + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.ApplyPolicyDefaultValues, "apply-policy-default-values", createReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) + cmd.Flags().BoolVar(&createReq.ApplyPolicyDefaultValues, "apply-policy-default-values", createReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) // TODO: complex arg: autoscale - 
createCmd.Flags().IntVar(&createReq.AutoterminationMinutes, "autotermination-minutes", createReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) + cmd.Flags().IntVar(&createReq.AutoterminationMinutes, "autotermination-minutes", createReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: complex arg: cluster_log_conf - createCmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) - createCmd.Flags().Var(&createReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) + cmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) + cmd.Flags().Var(&createReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) // TODO: map via StringToStringVar: custom_tags - createCmd.Flags().StringVar(&createReq.DriverInstancePoolId, "driver-instance-pool-id", createReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) - createCmd.Flags().StringVar(&createReq.DriverNodeTypeId, "driver-node-type-id", createReq.DriverNodeTypeId, `The node type of the Spark driver.`) - createCmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) - createCmd.Flags().BoolVar(&createReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", createReq.EnableLocalDiskEncryption, 
`Whether to enable LUKS on cluster VMs' local disks.`) + cmd.Flags().StringVar(&createReq.DriverInstancePoolId, "driver-instance-pool-id", createReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) + cmd.Flags().StringVar(&createReq.DriverNodeTypeId, "driver-node-type-id", createReq.DriverNodeTypeId, `The node type of the Spark driver.`) + cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&createReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", createReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) // TODO: complex arg: gcp_attributes // TODO: array: init_scripts - createCmd.Flags().StringVar(&createReq.InstancePoolId, "instance-pool-id", createReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) - createCmd.Flags().StringVar(&createReq.NodeTypeId, "node-type-id", createReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) - createCmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) - createCmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) - createCmd.Flags().Var(&createReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&createReq.InstancePoolId, "instance-pool-id", createReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) + cmd.Flags().StringVar(&createReq.NodeTypeId, "node-type-id", createReq.NodeTypeId, `This field encodes, through a single value, 
the resources available to each of the Spark nodes in this cluster.`) + cmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) + cmd.Flags().Var(&createReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) // TODO: map via StringToStringVar: spark_conf // TODO: map via StringToStringVar: spark_env_vars // TODO: array: ssh_public_keys // TODO: complex arg: workload_type -} - -var createCmd = &cobra.Command{ - Use: "create SPARK_VERSION", - Short: `Create new cluster.`, - Long: `Create new cluster. + cmd.Use = "create SPARK_VERSION" + cmd.Short = `Create new cluster.` + cmd.Long = `Create new cluster. Creates a new Spark cluster. This method will acquire new instances from the cloud provider if necessary. Note: Databricks may not be able to acquire some @@ -155,18 +195,20 @@ var createCmd = &cobra.Command{ If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed. 
Otherwise the cluster will terminate with an - informative error message.`, + informative error message.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -196,42 +238,70 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteCluster -var deleteJson flags.JsonFlag -var deleteSkipWait bool -var deleteTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *compute.DeleteCluster, +) -func init() { - Cmd.AddCommand(deleteCmd) +func newDelete() *cobra.Command { + cmd := &cobra.Command{} - deleteCmd.Flags().BoolVar(&deleteSkipWait, "no-wait", deleteSkipWait, `do not wait to reach TERMINATED state`) - deleteCmd.Flags().DurationVar(&deleteTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED state`) + var deleteReq compute.DeleteCluster + var deleteJson flags.JsonFlag + + var deleteSkipWait bool + var deleteTimeout time.Duration + + cmd.Flags().BoolVar(&deleteSkipWait, "no-wait", deleteSkipWait, `do not wait to reach TERMINATED state`) + cmd.Flags().DurationVar(&deleteTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED state`) // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete CLUSTER_ID", - Short: `Terminate cluster.`, - Long: `Terminate cluster. + cmd.Use = "delete CLUSTER_ID" + cmd.Short = `Terminate cluster.` + cmd.Long = `Terminate cluster. Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the termination has completed, the cluster will be in a TERMINATED state. 
If the cluster is already in a TERMINATING or - TERMINATED state, nothing will happen.`, + TERMINATED state, nothing will happen.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -241,23 +311,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster to be terminated") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster to be terminated") - } deleteReq.ClusterId = args[0] } @@ -278,61 +331,80 @@ var deleteCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditCluster -var editJson flags.JsonFlag -var editSkipWait bool -var editTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.EditCluster, +) -func init() { - Cmd.AddCommand(editCmd) +func newEdit() *cobra.Command { + cmd := &cobra.Command{} - editCmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) - editCmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var editReq compute.EditCluster + var editJson flags.JsonFlag + + var editSkipWait bool + var editTimeout time.Duration + + cmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().BoolVar(&editReq.ApplyPolicyDefaultValues, "apply-policy-default-values", editReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) + cmd.Flags().BoolVar(&editReq.ApplyPolicyDefaultValues, "apply-policy-default-values", editReq.ApplyPolicyDefaultValues, `Note: This field won't be true for webapp requests.`) // TODO: complex arg: autoscale - editCmd.Flags().IntVar(&editReq.AutoterminationMinutes, "autotermination-minutes", editReq.AutoterminationMinutes, `Automatically 
terminates the cluster after it is inactive for this time in minutes.`) + cmd.Flags().IntVar(&editReq.AutoterminationMinutes, "autotermination-minutes", editReq.AutoterminationMinutes, `Automatically terminates the cluster after it is inactive for this time in minutes.`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: complex arg: cluster_log_conf - editCmd.Flags().StringVar(&editReq.ClusterName, "cluster-name", editReq.ClusterName, `Cluster name requested by the user.`) - editCmd.Flags().Var(&editReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) + cmd.Flags().StringVar(&editReq.ClusterName, "cluster-name", editReq.ClusterName, `Cluster name requested by the user.`) + cmd.Flags().Var(&editReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) // TODO: map via StringToStringVar: custom_tags - editCmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `This describes an enum.`) + cmd.Flags().Var(&editReq.DataSecurityMode, "data-security-mode", `This describes an enum.`) // TODO: complex arg: docker_image - editCmd.Flags().StringVar(&editReq.DriverInstancePoolId, "driver-instance-pool-id", editReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) - editCmd.Flags().StringVar(&editReq.DriverNodeTypeId, "driver-node-type-id", editReq.DriverNodeTypeId, `The node type of the Spark driver.`) - editCmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) - editCmd.Flags().BoolVar(&editReq.EnableLocalDiskEncryption, 
"enable-local-disk-encryption", editReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) + cmd.Flags().StringVar(&editReq.DriverInstancePoolId, "driver-instance-pool-id", editReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) + cmd.Flags().StringVar(&editReq.DriverNodeTypeId, "driver-node-type-id", editReq.DriverNodeTypeId, `The node type of the Spark driver.`) + cmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&editReq.EnableLocalDiskEncryption, "enable-local-disk-encryption", editReq.EnableLocalDiskEncryption, `Whether to enable LUKS on cluster VMs' local disks.`) // TODO: complex arg: gcp_attributes // TODO: array: init_scripts - editCmd.Flags().StringVar(&editReq.InstancePoolId, "instance-pool-id", editReq.InstancePoolId, `The optional ID of the instance pool to which the cluster belongs.`) - editCmd.Flags().StringVar(&editReq.NodeTypeId, "node-type-id", editReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) - editCmd.Flags().IntVar(&editReq.NumWorkers, "num-workers", editReq.NumWorkers, `Number of worker nodes that this cluster should have.`) - editCmd.Flags().StringVar(&editReq.PolicyId, "policy-id", editReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) - editCmd.Flags().Var(&editReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) - editCmd.Flags().StringVar(&editReq.SingleUserName, "single-user-name", editReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) + cmd.Flags().StringVar(&editReq.InstancePoolId, "instance-pool-id", editReq.InstancePoolId, `The optional ID of the 
instance pool to which the cluster belongs.`) + cmd.Flags().StringVar(&editReq.NodeTypeId, "node-type-id", editReq.NodeTypeId, `This field encodes, through a single value, the resources available to each of the Spark nodes in this cluster.`) + cmd.Flags().IntVar(&editReq.NumWorkers, "num-workers", editReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().StringVar(&editReq.PolicyId, "policy-id", editReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) + cmd.Flags().Var(&editReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&editReq.SingleUserName, "single-user-name", editReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) // TODO: map via StringToStringVar: spark_conf // TODO: map via StringToStringVar: spark_env_vars // TODO: array: ssh_public_keys // TODO: complex arg: workload_type -} - -var editCmd = &cobra.Command{ - Use: "edit CLUSTER_ID SPARK_VERSION", - Short: `Update cluster configuration.`, - Long: `Update cluster configuration. + cmd.Use = "edit CLUSTER_ID SPARK_VERSION" + cmd.Short = `Update cluster configuration.` + cmd.Long = `Update cluster configuration. Updates the configuration of a cluster to match the provided attributes and size. A cluster can be updated if it is in a RUNNING or TERMINATED state. @@ -345,18 +417,20 @@ var editCmd = &cobra.Command{ new attributes will take effect. Any attempt to update a cluster in any other state will be rejected with an INVALID_STATE error code. 
- Clusters created by the Databricks Jobs service cannot be edited.`, + Clusters created by the Databricks Jobs service cannot be edited.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -387,42 +461,71 @@ var editCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start events command -var eventsReq compute.GetEvents -var eventsJson flags.JsonFlag -func init() { - Cmd.AddCommand(eventsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var eventsOverrides []func( + *cobra.Command, + *compute.GetEvents, +) + +func newEvents() *cobra.Command { + cmd := &cobra.Command{} + + var eventsReq compute.GetEvents + var eventsJson flags.JsonFlag + // TODO: short flags - eventsCmd.Flags().Var(&eventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&eventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - eventsCmd.Flags().Int64Var(&eventsReq.EndTime, "end-time", eventsReq.EndTime, `The end time in epoch milliseconds.`) + cmd.Flags().Int64Var(&eventsReq.EndTime, "end-time", eventsReq.EndTime, `The end time in epoch milliseconds.`) // TODO: array: event_types - eventsCmd.Flags().Int64Var(&eventsReq.Limit, "limit", eventsReq.Limit, `The maximum number of events to include in a page of events.`) - eventsCmd.Flags().Int64Var(&eventsReq.Offset, "offset", eventsReq.Offset, `The offset in the result set.`) - eventsCmd.Flags().Var(&eventsReq.Order, "order", `The order to list events in; either "ASC" or "DESC".`) - eventsCmd.Flags().Int64Var(&eventsReq.StartTime, "start-time", eventsReq.StartTime, `The start time in epoch milliseconds.`) + cmd.Flags().Int64Var(&eventsReq.Limit, "limit", eventsReq.Limit, `The maximum number of events to include in a page of events.`) + cmd.Flags().Int64Var(&eventsReq.Offset, "offset", eventsReq.Offset, `The offset in the result set.`) + cmd.Flags().Var(&eventsReq.Order, "order", `The order to list events in; either "ASC" or "DESC".`) + cmd.Flags().Int64Var(&eventsReq.StartTime, "start-time", eventsReq.StartTime, `The start time in epoch milliseconds.`) -} - -var eventsCmd = &cobra.Command{ - Use: "events CLUSTER_ID", - Short: `List cluster activity events.`, - Long: `List cluster activity events. + cmd.Use = "events CLUSTER_ID" + cmd.Short = `List cluster activity events.` + cmd.Long = `List cluster activity events. Retrieves a list of events about the activity of a cluster. This API is paginated. 
If there are more events to read, the response includes all the - nparameters necessary to request the next page of events.`, + nparameters necessary to request the next page of events.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -432,23 +535,6 @@ var eventsCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the cluster to retrieve events about") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the cluster to retrieve events about") - } eventsReq.ClusterId = args[0] } @@ -457,58 +543,66 @@ var eventsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range eventsOverrides { + fn(cmd, &eventsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEvents()) + }) } // start get command -var getReq compute.GetClusterRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetClusterRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq compute.GetClusterRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CLUSTER_ID", - Short: `Get cluster info.`, - Long: `Get cluster info. + cmd.Use = "get CLUSTER_ID" + cmd.Short = `Get cluster info.` + cmd.Long = `Get cluster info. Retrieves the information for a cluster given its identifier. 
Clusters can be - described while they are running, or up to 60 days after they are terminated.`, + described while they are running, or up to 60 days after they are terminated.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster about which to retrieve information") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster about which to retrieve information") - } getReq.ClusterId = args[0] response, err := w.Clusters.Get(ctx, getReq) @@ -516,29 +610,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq compute.ListClustersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *compute.ListClustersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListClustersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CanUseClient, "can-use-client", listReq.CanUseClient, `Filter clusters based on what type of client it can be used for.`) + cmd.Flags().StringVar(&listReq.CanUseClient, "can-use-client", listReq.CanUseClient, `Filter clusters based on what type of client it can be used for.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List all clusters.`, - Long: `List all clusters. + cmd.Use = "list" + cmd.Short = `List all clusters.` + cmd.Long = `List all clusters. 
Return information about all pinned clusters, active clusters, up to 200 of the most recently terminated all-purpose clusters in the past 30 days, and up @@ -548,18 +662,20 @@ var listCmd = &cobra.Command{ all-purpose clusters in the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1 pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently - terminated job clusters.`, + terminated job clusters.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -576,30 +692,48 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-node-types command -func init() { - Cmd.AddCommand(listNodeTypesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listNodeTypesOverrides []func( + *cobra.Command, +) -} +func newListNodeTypes() *cobra.Command { + cmd := &cobra.Command{} -var listNodeTypesCmd = &cobra.Command{ - Use: "list-node-types", - Short: `List node types.`, - Long: `List node types. + cmd.Use = "list-node-types" + cmd.Short = `List node types.` + cmd.Long = `List node types. Returns a list of supported Spark node types. These node types can be used to - launch a cluster.`, + launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.ListNodeTypes(ctx) @@ -607,30 +741,48 @@ var listNodeTypesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listNodeTypesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListNodeTypes()) + }) } // start list-zones command -func init() { - Cmd.AddCommand(listZonesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listZonesOverrides []func( + *cobra.Command, +) -} +func newListZones() *cobra.Command { + cmd := &cobra.Command{} -var listZonesCmd = &cobra.Command{ - Use: "list-zones", - Short: `List availability zones.`, - Long: `List availability zones. 
+ cmd.Use = "list-zones" + cmd.Short = `List availability zones.` + cmd.Long = `List availability zones. Returns a list of availability zones where clusters can be created in (For - example, us-west-2a). These zones can be used to launch a cluster.`, + example, us-west-2a). These zones can be used to launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.ListZones(ctx) @@ -638,38 +790,67 @@ var listZonesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listZonesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListZones()) + }) } // start permanent-delete command -var permanentDeleteReq compute.PermanentDeleteCluster -var permanentDeleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(permanentDeleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var permanentDeleteOverrides []func( + *cobra.Command, + *compute.PermanentDeleteCluster, +) + +func newPermanentDelete() *cobra.Command { + cmd := &cobra.Command{} + + var permanentDeleteReq compute.PermanentDeleteCluster + var permanentDeleteJson flags.JsonFlag + // TODO: short flags - permanentDeleteCmd.Flags().Var(&permanentDeleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&permanentDeleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var permanentDeleteCmd = &cobra.Command{ - Use: "permanent-delete CLUSTER_ID", - Short: `Permanently delete cluster.`, - Long: `Permanently delete cluster. + cmd.Use = "permanent-delete CLUSTER_ID" + cmd.Short = `Permanently delete cluster.` + cmd.Long = `Permanently delete cluster. Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously removed. In addition, users will no longer see permanently deleted clusters in the cluster list, and API users can no longer perform any action on permanently - deleted clusters.`, + deleted clusters.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -679,23 +860,6 @@ var permanentDeleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." 
- names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster to be deleted") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster to be deleted") - } permanentDeleteReq.ClusterId = args[0] } @@ -704,35 +868,64 @@ var permanentDeleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range permanentDeleteOverrides { + fn(cmd, &permanentDeleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPermanentDelete()) + }) } // start pin command -var pinReq compute.PinCluster -var pinJson flags.JsonFlag -func init() { - Cmd.AddCommand(pinCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var pinOverrides []func( + *cobra.Command, + *compute.PinCluster, +) + +func newPin() *cobra.Command { + cmd := &cobra.Command{} + + var pinReq compute.PinCluster + var pinJson flags.JsonFlag + // TODO: short flags - pinCmd.Flags().Var(&pinJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&pinJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var pinCmd = &cobra.Command{ - Use: "pin CLUSTER_ID", - Short: `Pin cluster.`, - Long: `Pin cluster. 
+ cmd.Use = "pin CLUSTER_ID" + cmd.Short = `Pin cluster.` + cmd.Long = `Pin cluster. Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a cluster that is already pinned will have no - effect. This API can only be called by workspace admins.`, + effect. This API can only be called by workspace admins.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -742,23 +935,6 @@ var pinCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } pinReq.ClusterId = args[0] } @@ -767,43 +943,71 @@ var pinCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range pinOverrides { + fn(cmd, &pinReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPin()) + }) } // start resize command -var resizeReq compute.ResizeCluster -var resizeJson flags.JsonFlag -var resizeSkipWait bool -var resizeTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resizeOverrides []func( + *cobra.Command, + *compute.ResizeCluster, +) -func init() { - Cmd.AddCommand(resizeCmd) +func newResize() *cobra.Command { + cmd := &cobra.Command{} - resizeCmd.Flags().BoolVar(&resizeSkipWait, "no-wait", resizeSkipWait, `do not wait to reach RUNNING state`) - resizeCmd.Flags().DurationVar(&resizeTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var resizeReq compute.ResizeCluster + var resizeJson flags.JsonFlag + + var resizeSkipWait bool + var resizeTimeout time.Duration + + cmd.Flags().BoolVar(&resizeSkipWait, "no-wait", resizeSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&resizeTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - resizeCmd.Flags().Var(&resizeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&resizeJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: autoscale - resizeCmd.Flags().IntVar(&resizeReq.NumWorkers, "num-workers", resizeReq.NumWorkers, `Number of worker nodes that this cluster should have.`) + cmd.Flags().IntVar(&resizeReq.NumWorkers, "num-workers", resizeReq.NumWorkers, `Number of worker nodes that this cluster should have.`) -} - -var resizeCmd = &cobra.Command{ - Use: "resize CLUSTER_ID", - Short: `Resize cluster.`, - Long: `Resize cluster. 
+ cmd.Use = "resize CLUSTER_ID" + cmd.Short = `Resize cluster.` + cmd.Long = `Resize cluster. Resizes a cluster to have a desired number of workers. This will fail unless - the cluster is in a RUNNING state.`, + the cluster is in a RUNNING state.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -813,23 +1017,6 @@ var resizeCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster to be resized") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster to be resized") - } resizeReq.ClusterId = args[0] } @@ -850,42 +1037,70 @@ var resizeCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range resizeOverrides { + fn(cmd, &resizeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newResize()) + }) } // start restart command -var restartReq compute.RestartCluster -var restartJson flags.JsonFlag -var restartSkipWait bool -var restartTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restartOverrides []func( + *cobra.Command, + *compute.RestartCluster, +) -func init() { - Cmd.AddCommand(restartCmd) +func newRestart() *cobra.Command { + cmd := &cobra.Command{} - restartCmd.Flags().BoolVar(&restartSkipWait, "no-wait", restartSkipWait, `do not wait to reach RUNNING state`) - restartCmd.Flags().DurationVar(&restartTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var restartReq compute.RestartCluster + var restartJson flags.JsonFlag + + var restartSkipWait bool + var restartTimeout time.Duration + + cmd.Flags().BoolVar(&restartSkipWait, "no-wait", restartSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&restartTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - restartCmd.Flags().Var(&restartJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restartJson, "json", `either inline JSON string or @path/to/file.json with request body`) - restartCmd.Flags().StringVar(&restartReq.RestartUser, "restart-user", restartReq.RestartUser, `.`) + cmd.Flags().StringVar(&restartReq.RestartUser, "restart-user", restartReq.RestartUser, `.`) -} - -var restartCmd = &cobra.Command{ - Use: "restart CLUSTER_ID", - Short: `Restart cluster.`, - Long: `Restart cluster. + cmd.Use = "restart CLUSTER_ID" + cmd.Short = `Restart cluster.` + cmd.Long = `Restart cluster. 
Restarts a Spark cluster with the supplied ID. If the cluster is not currently - in a RUNNING state, nothing will happen.`, + in a RUNNING state, nothing will happen.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -895,23 +1110,6 @@ var restartCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster to be started") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster to be started") - } restartReq.ClusterId = args[0] } @@ -932,30 +1130,48 @@ var restartCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range restartOverrides { + fn(cmd, &restartReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestart()) + }) } // start spark-versions command -func init() { - Cmd.AddCommand(sparkVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var sparkVersionsOverrides []func( + *cobra.Command, +) -} +func newSparkVersions() *cobra.Command { + cmd := &cobra.Command{} -var sparkVersionsCmd = &cobra.Command{ - Use: "spark-versions", - Short: `List available Spark versions.`, - Long: `List available Spark versions. + cmd.Use = "spark-versions" + cmd.Short = `List available Spark versions.` + cmd.Long = `List available Spark versions. Returns the list of available Spark versions. These versions can be used to - launch a cluster.`, + launch a cluster.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Clusters.SparkVersions(ctx) @@ -963,33 +1179,52 @@ var sparkVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range sparkVersionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSparkVersions()) + }) } // start start command -var startReq compute.StartCluster -var startJson flags.JsonFlag -var startSkipWait bool -var startTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var startOverrides []func( + *cobra.Command, + *compute.StartCluster, +) -func init() { - Cmd.AddCommand(startCmd) +func newStart() *cobra.Command { + cmd := &cobra.Command{} - startCmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) - startCmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var startReq compute.StartCluster + var startJson flags.JsonFlag + + var startSkipWait bool + var startTimeout time.Duration + + cmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - startCmd.Flags().Var(&startJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&startJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var startCmd = &cobra.Command{ - Use: "start CLUSTER_ID", - Short: `Start terminated cluster.`, - Long: `Start terminated cluster. + cmd.Use = "start CLUSTER_ID" + cmd.Short = `Start terminated cluster.` + cmd.Long = `Start terminated cluster. Starts a terminated Spark cluster with the supplied ID. This works similar to createCluster except: @@ -998,11 +1233,20 @@ var startCmd = &cobra.Command{ with the last specified cluster size. 
* If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum number of nodes. * If the cluster is not currently in a TERMINATED state, nothing will - happen. * Clusters launched to run a job cannot be started.`, + happen. * Clusters launched to run a job cannot be started.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1012,23 +1256,6 @@ var startCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The cluster to be started") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the cluster to be started") - } startReq.ClusterId = args[0] } @@ -1049,35 +1276,64 @@ var startCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range startOverrides { + fn(cmd, &startReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStart()) + }) } // start unpin command -var unpinReq compute.UnpinCluster -var unpinJson flags.JsonFlag -func init() { - Cmd.AddCommand(unpinCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var unpinOverrides []func( + *cobra.Command, + *compute.UnpinCluster, +) + +func newUnpin() *cobra.Command { + cmd := &cobra.Command{} + + var unpinReq compute.UnpinCluster + var unpinJson flags.JsonFlag + // TODO: short flags - unpinCmd.Flags().Var(&unpinJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&unpinJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var unpinCmd = &cobra.Command{ - Use: "unpin CLUSTER_ID", - Short: `Unpin cluster.`, - Long: `Unpin cluster. + cmd.Use = "unpin CLUSTER_ID" + cmd.Short = `Unpin cluster.` + cmd.Long = `Unpin cluster. Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API. Unpinning a cluster that is not pinned will have no effect. 
- This API can only be called by workspace admins.`, + This API can only be called by workspace admins.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1087,23 +1343,6 @@ var unpinCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." - names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } unpinReq.ClusterId = args[0] } @@ -1112,10 +1351,24 @@ var unpinCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range unpinOverrides { + fn(cmd, &unpinReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUnpin()) + }) } // end service Clusters diff --git a/cmd/workspace/clusters/overrides.go b/cmd/workspace/clusters/overrides.go index 1e8818952..ab32a4cd8 100644 --- a/cmd/workspace/clusters/overrides.go +++ b/cmd/workspace/clusters/overrides.go @@ -1,19 +1,33 @@ package clusters -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *compute.ListClustersRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "State"}} {{range .}}{{.ClusterId | green}} {{.ClusterName | cyan}} {{if eq .State "RUNNING"}}{{green "%s" .State}}{{else if eq .State "TERMINATED"}}{{red "%s" .State}}{{else}}{{blue "%s" .State}}{{end}} {{end}}`) +} +func listNodeTypesOverride(listNodeTypesCmd *cobra.Command) { listNodeTypesCmd.Annotations["template"] = cmdio.Heredoc(` {{range .NodeTypes}}{{.NodeTypeId | green}} {{.NumCores}} {{.MemoryMb}} {{.Category | blue}} {{end}}`) +} +func sparkVersionsOverride(sparkVersionsCmd *cobra.Command) { sparkVersionsCmd.Annotations["template"] = cmdio.Heredoc(` {{range .Versions}}{{.Key | green}} {{.Name}} {{end}} `) } + +func init() { + listOverrides = append(listOverrides, listOverride) + listNodeTypesOverrides = append(listNodeTypesOverrides, listNodeTypesOverride) + sparkVersionsOverrides = append(sparkVersionsOverrides, sparkVersionsOverride) +} diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 596dba829..74dcc3a58 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -3,8 +3,6 @@ package workspace import ( - "github.com/databricks/cli/cmd/root" - alerts "github.com/databricks/cli/cmd/workspace/alerts" catalogs 
"github.com/databricks/cli/cmd/workspace/catalogs" clean_rooms "github.com/databricks/cli/cmd/workspace/clean-rooms" @@ -54,108 +52,61 @@ import ( workspace "github.com/databricks/cli/cmd/workspace/workspace" workspace_bindings "github.com/databricks/cli/cmd/workspace/workspace-bindings" workspace_conf "github.com/databricks/cli/cmd/workspace/workspace-conf" + "github.com/spf13/cobra" ) -func init() { - root.RootCmd.AddCommand(alerts.Cmd) - root.RootCmd.AddCommand(catalogs.Cmd) - root.RootCmd.AddCommand(clean_rooms.Cmd) - root.RootCmd.AddCommand(cluster_policies.Cmd) - root.RootCmd.AddCommand(clusters.Cmd) - root.RootCmd.AddCommand(connections.Cmd) - root.RootCmd.AddCommand(current_user.Cmd) - root.RootCmd.AddCommand(dashboards.Cmd) - root.RootCmd.AddCommand(data_sources.Cmd) - root.RootCmd.AddCommand(experiments.Cmd) - root.RootCmd.AddCommand(external_locations.Cmd) - root.RootCmd.AddCommand(functions.Cmd) - root.RootCmd.AddCommand(git_credentials.Cmd) - root.RootCmd.AddCommand(global_init_scripts.Cmd) - root.RootCmd.AddCommand(grants.Cmd) - root.RootCmd.AddCommand(groups.Cmd) - root.RootCmd.AddCommand(instance_pools.Cmd) - root.RootCmd.AddCommand(instance_profiles.Cmd) - root.RootCmd.AddCommand(ip_access_lists.Cmd) - root.RootCmd.AddCommand(jobs.Cmd) - root.RootCmd.AddCommand(libraries.Cmd) - root.RootCmd.AddCommand(metastores.Cmd) - root.RootCmd.AddCommand(model_registry.Cmd) - root.RootCmd.AddCommand(permissions.Cmd) - root.RootCmd.AddCommand(pipelines.Cmd) - root.RootCmd.AddCommand(policy_families.Cmd) - root.RootCmd.AddCommand(providers.Cmd) - root.RootCmd.AddCommand(queries.Cmd) - root.RootCmd.AddCommand(query_history.Cmd) - root.RootCmd.AddCommand(recipient_activation.Cmd) - root.RootCmd.AddCommand(recipients.Cmd) - root.RootCmd.AddCommand(repos.Cmd) - root.RootCmd.AddCommand(schemas.Cmd) - root.RootCmd.AddCommand(secrets.Cmd) - root.RootCmd.AddCommand(service_principals.Cmd) - root.RootCmd.AddCommand(serving_endpoints.Cmd) - 
root.RootCmd.AddCommand(shares.Cmd) - root.RootCmd.AddCommand(storage_credentials.Cmd) - root.RootCmd.AddCommand(system_schemas.Cmd) - root.RootCmd.AddCommand(table_constraints.Cmd) - root.RootCmd.AddCommand(tables.Cmd) - root.RootCmd.AddCommand(token_management.Cmd) - root.RootCmd.AddCommand(tokens.Cmd) - root.RootCmd.AddCommand(users.Cmd) - root.RootCmd.AddCommand(volumes.Cmd) - root.RootCmd.AddCommand(warehouses.Cmd) - root.RootCmd.AddCommand(workspace.Cmd) - root.RootCmd.AddCommand(workspace_bindings.Cmd) - root.RootCmd.AddCommand(workspace_conf.Cmd) +func All() []*cobra.Command { + var out []*cobra.Command - // Register commands with groups - alerts.Cmd.GroupID = "sql" - catalogs.Cmd.GroupID = "catalog" - clean_rooms.Cmd.GroupID = "sharing" - cluster_policies.Cmd.GroupID = "compute" - clusters.Cmd.GroupID = "compute" - connections.Cmd.GroupID = "catalog" - current_user.Cmd.GroupID = "iam" - dashboards.Cmd.GroupID = "sql" - data_sources.Cmd.GroupID = "sql" - experiments.Cmd.GroupID = "ml" - external_locations.Cmd.GroupID = "catalog" - functions.Cmd.GroupID = "catalog" - git_credentials.Cmd.GroupID = "workspace" - global_init_scripts.Cmd.GroupID = "compute" - grants.Cmd.GroupID = "catalog" - groups.Cmd.GroupID = "iam" - instance_pools.Cmd.GroupID = "compute" - instance_profiles.Cmd.GroupID = "compute" - ip_access_lists.Cmd.GroupID = "settings" - jobs.Cmd.GroupID = "jobs" - libraries.Cmd.GroupID = "compute" - metastores.Cmd.GroupID = "catalog" - model_registry.Cmd.GroupID = "ml" - permissions.Cmd.GroupID = "iam" - pipelines.Cmd.GroupID = "pipelines" - policy_families.Cmd.GroupID = "compute" - providers.Cmd.GroupID = "sharing" - queries.Cmd.GroupID = "sql" - query_history.Cmd.GroupID = "sql" - recipient_activation.Cmd.GroupID = "sharing" - recipients.Cmd.GroupID = "sharing" - repos.Cmd.GroupID = "workspace" - schemas.Cmd.GroupID = "catalog" - secrets.Cmd.GroupID = "workspace" - service_principals.Cmd.GroupID = "iam" - serving_endpoints.Cmd.GroupID = "serving" - 
shares.Cmd.GroupID = "sharing" - storage_credentials.Cmd.GroupID = "catalog" - system_schemas.Cmd.GroupID = "catalog" - table_constraints.Cmd.GroupID = "catalog" - tables.Cmd.GroupID = "catalog" - token_management.Cmd.GroupID = "settings" - tokens.Cmd.GroupID = "settings" - users.Cmd.GroupID = "iam" - volumes.Cmd.GroupID = "catalog" - warehouses.Cmd.GroupID = "sql" - workspace.Cmd.GroupID = "workspace" - workspace_bindings.Cmd.GroupID = "catalog" - workspace_conf.Cmd.GroupID = "settings" + out = append(out, alerts.New()) + out = append(out, catalogs.New()) + out = append(out, clean_rooms.New()) + out = append(out, cluster_policies.New()) + out = append(out, clusters.New()) + out = append(out, connections.New()) + out = append(out, current_user.New()) + out = append(out, dashboards.New()) + out = append(out, data_sources.New()) + out = append(out, experiments.New()) + out = append(out, external_locations.New()) + out = append(out, functions.New()) + out = append(out, git_credentials.New()) + out = append(out, global_init_scripts.New()) + out = append(out, grants.New()) + out = append(out, groups.New()) + out = append(out, instance_pools.New()) + out = append(out, instance_profiles.New()) + out = append(out, ip_access_lists.New()) + out = append(out, jobs.New()) + out = append(out, libraries.New()) + out = append(out, metastores.New()) + out = append(out, model_registry.New()) + out = append(out, permissions.New()) + out = append(out, pipelines.New()) + out = append(out, policy_families.New()) + out = append(out, providers.New()) + out = append(out, queries.New()) + out = append(out, query_history.New()) + out = append(out, recipient_activation.New()) + out = append(out, recipients.New()) + out = append(out, repos.New()) + out = append(out, schemas.New()) + out = append(out, secrets.New()) + out = append(out, service_principals.New()) + out = append(out, serving_endpoints.New()) + out = append(out, shares.New()) + out = append(out, storage_credentials.New()) + out = 
append(out, system_schemas.New()) + out = append(out, table_constraints.New()) + out = append(out, tables.New()) + out = append(out, token_management.New()) + out = append(out, tokens.New()) + out = append(out, users.New()) + out = append(out, volumes.New()) + out = append(out, warehouses.New()) + out = append(out, workspace.New()) + out = append(out, workspace_bindings.New()) + out = append(out, workspace_conf.New()) + return out } diff --git a/cmd/workspace/connections/connections.go b/cmd/workspace/connections/connections.go index 146fdba9d..89636b594 100755 --- a/cmd/workspace/connections/connections.go +++ b/cmd/workspace/connections/connections.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "connections", - Short: `Connections allow for creating a connection to an external data source.`, - Long: `Connections allow for creating a connection to an external data source. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "connections", + Short: `Connections allow for creating a connection to an external data source.`, + Long: `Connections allow for creating a connection to an external data source. A connection is an abstraction of an external data source that can be connected from Databricks Compute. Creating a connection object is the first @@ -26,44 +31,60 @@ var Cmd = &cobra.Command{ may create different types of connections with each connection having a unique set of configuration options to support credential management and other settings.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, - // This service is being previewed; hide from help output. 
- Hidden: true, + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateConnection -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateConnection, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateConnection + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of current owner of the connection.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of current owner of the connection.`) // TODO: map via StringToStringVar: properties_kvpairs - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `If the connection is read only.`) + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `If the connection is read only.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a connection.`, - Long: `Create a connection. + cmd.Use = "create" + cmd.Short = `Create a connection.` + cmd.Long = `Create a connection. 
Creates a new connection Creates a new connection to an external data source. It allows users to specify connection details and configurations for interaction with the - external server.`, + external server.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,51 +102,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteConnectionRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteConnectionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteConnectionRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME_ARG", - Short: `Delete a connection.`, - Long: `Delete a connection. + cmd.Use = "delete NAME_ARG" + cmd.Short = `Delete a connection.` + cmd.Long = `Delete a connection. 
- Deletes the connection that matches the supplied name.`, + Deletes the connection that matches the supplied name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME_ARG argument specified. Loading names for Connections drop-down." - names, err := w.Connections.ConnectionInfoNameToFullNameMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Connections drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The name of the connection to be deleted") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the name of the connection to be deleted") - } deleteReq.NameArg = args[0] err = w.Connections.Delete(ctx, deleteReq) @@ -133,51 +163,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetConnectionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetConnectionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetConnectionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME_ARG", - Short: `Get a connection.`, - Long: `Get a connection. + cmd.Use = "get NAME_ARG" + cmd.Short = `Get a connection.` + cmd.Long = `Get a connection. - Gets a connection from it's name.`, + Gets a connection from it's name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME_ARG argument specified. Loading names for Connections drop-down." - names, err := w.Connections.ConnectionInfoNameToFullNameMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Connections drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the connection") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the connection") - } getReq.NameArg = args[0] response, err := w.Connections.Get(ctx, getReq) @@ -185,29 +224,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List connections.`, - Long: `List connections. + cmd.Use = "list" + cmd.Short = `List connections.` + cmd.Long = `List connections. 
- List all connections.`, + List all connections.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Connections.ListAll(ctx) @@ -215,33 +272,54 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateConnection -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateConnection, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateConnection + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update a connection.`, - Long: `Update a connection. + cmd.Use = "update" + cmd.Short = `Update a connection.` + cmd.Long = `Update a connection. 
- Updates the connection that matches the supplied name.`, + Updates the connection that matches the supplied name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -259,10 +337,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Connections diff --git a/cmd/workspace/current-user/current-user.go b/cmd/workspace/current-user/current-user.go index 80e539ec9..cb18e71d2 100755 --- a/cmd/workspace/current-user/current-user.go +++ b/cmd/workspace/current-user/current-user.go @@ -8,33 +8,51 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "current-user", - Short: `This API allows retrieving information about currently authenticated user or service principal.`, - Long: `This API allows retrieving information about currently authenticated user or +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "current-user", + Short: `This API allows retrieving information about currently authenticated user or service principal.`, + Long: `This API allows retrieving information about currently authenticated user or service principal.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start me command -func init() { - Cmd.AddCommand(meCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var meOverrides []func( + *cobra.Command, +) -} +func newMe() *cobra.Command { + cmd := &cobra.Command{} -var meCmd = &cobra.Command{ - Use: "me", - Short: `Get current user info.`, - Long: `Get current user info. + cmd.Use = "me" + cmd.Short = `Get current user info.` + cmd.Long = `Get current user info. - Get details about the current method caller's identity.`, + Get details about the current method caller's identity.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.CurrentUser.Me(ctx) @@ -42,10 +60,24 @@ var meCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range meOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newMe()) + }) } // end service CurrentUser diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index 014be02f8..03796c2f2 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -12,46 +12,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "dashboards", - Short: `In general, there is little need to modify dashboards using the API.`, - Long: `In general, there is little need to modify dashboards using the API. However, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "dashboards", + Short: `In general, there is little need to modify dashboards using the API.`, + Long: `In general, there is little need to modify dashboards using the API. However, it can be useful to use dashboard objects to look-up a collection of related query IDs. The API can also be used to duplicate multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it to create a new one. Dashboards can be scheduled using the sql_task type of the Jobs API, e.g. :method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateDashboardRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateDashboardRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateDashboardRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} + cmd.Use = "create" + cmd.Short = `Create a dashboard object.` + cmd.Long = `Create a dashboard object.` -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a dashboard object.`, - Long: `Create a dashboard object.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -69,52 +91,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteDashboardRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteDashboardRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteDashboardRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete DASHBOARD_ID", - Short: `Remove a dashboard.`, - Long: `Remove a dashboard. + cmd.Use = "delete DASHBOARD_ID" + cmd.Short = `Remove a dashboard.` + cmd.Long = `Remove a dashboard. Moves a dashboard to the trash. Trashed dashboards do not appear in list views - or searches, and cannot be shared.`, + or searches, and cannot be shared.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.DashboardId = args[0] err = w.Dashboards.Delete(ctx, deleteReq) @@ -122,52 +153,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetDashboardRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetDashboardRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetDashboardRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get DASHBOARD_ID", - Short: `Retrieve a definition.`, - Long: `Retrieve a definition. + cmd.Use = "get DASHBOARD_ID" + cmd.Short = `Retrieve a definition.` + cmd.Long = `Retrieve a definition. 
Returns a JSON representation of a dashboard object, including its - visualization and query objects.`, + visualization and query objects.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.DashboardId = args[0] response, err := w.Dashboards.Get(ctx, getReq) @@ -175,45 +215,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sql.ListDashboardsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sql.ListDashboardsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListDashboardsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Var(&listReq.Order, "order", `Name of dashboard attribute to order by.`) - listCmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) - listCmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of dashboards to return per page.`) - listCmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) + cmd.Flags().Var(&listReq.Order, "order", `Name of dashboard attribute to order by.`) + cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of dashboards to return per page.`) + cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get dashboard objects.`, - Long: `Get dashboard objects. + cmd.Use = "list" + cmd.Short = `Get dashboard objects.` + cmd.Long = `Get dashboard objects. 
- Fetch a paginated list of dashboard objects.`, + Fetch a paginated list of dashboard objects.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -230,51 +292,60 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start restore command -var restoreReq sql.RestoreDashboardRequest -func init() { - Cmd.AddCommand(restoreCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreOverrides []func( + *cobra.Command, + *sql.RestoreDashboardRequest, +) + +func newRestore() *cobra.Command { + cmd := &cobra.Command{} + + var restoreReq sql.RestoreDashboardRequest + // TODO: short flags -} - -var restoreCmd = &cobra.Command{ - Use: "restore DASHBOARD_ID", - Short: `Restore a dashboard.`, - Long: `Restore a dashboard. + cmd.Use = "restore DASHBOARD_ID" + cmd.Short = `Restore a dashboard.` + cmd.Long = `Restore a dashboard. 
- A restored dashboard appears in list views and searches and can be shared.`, + A restored dashboard appears in list views and searches and can be shared.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." - names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } restoreReq.DashboardId = args[0] err = w.Dashboards.Restore(ctx, restoreReq) @@ -282,10 +353,24 @@ var restoreCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range restoreOverrides { + fn(cmd, &restoreReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestore()) + }) } // end service Dashboards diff --git a/cmd/workspace/dashboards/overrides.go b/cmd/workspace/dashboards/overrides.go index ba7e42ec7..709e657f8 100644 --- a/cmd/workspace/dashboards/overrides.go +++ b/cmd/workspace/dashboards/overrides.go @@ -1,10 +1,18 @@ package dashboards -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, _ *sql.ListDashboardsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{range .}}{{.Id|green}} {{.Name}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/data-sources/data-sources.go b/cmd/workspace/data-sources/data-sources.go index 513ce2144..969399f42 100755 --- a/cmd/workspace/data-sources/data-sources.go +++ b/cmd/workspace/data-sources/data-sources.go @@ -8,10 +8,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "data-sources", - Short: `This API is provided to assist you in making new query objects.`, - Long: `This API is provided to assist you in making new query objects. When creating +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "data-sources", + Short: `This API is provided to assist you in making new query objects.`, + Long: `This API is provided to assist you in making new query objects. 
When creating a query object, you may optionally specify a data_source_id for the SQL warehouse against which it will run. If you don't already know the data_source_id for your desired SQL warehouse, this API will help you find @@ -21,30 +26,43 @@ var Cmd = &cobra.Command{ in your workspace. We advise you to use any text editor, REST client, or grep to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a list of SQL warehouses.`, - Long: `Get a list of SQL warehouses. + cmd.Use = "list" + cmd.Short = `Get a list of SQL warehouses.` + cmd.Long = `Get a list of SQL warehouses. Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this API response are enumerated for clarity. 
However, - you need only a SQL warehouse's id to create new queries against it.`, + you need only a SQL warehouse's id to create new queries against it.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.DataSources.List(ctx) @@ -52,10 +70,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service DataSources diff --git a/cmd/workspace/experiments/experiments.go b/cmd/workspace/experiments/experiments.go index a95da2f5c..1e2ff9fa1 100755 --- a/cmd/workspace/experiments/experiments.go +++ b/cmd/workspace/experiments/experiments.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "experiments", - Short: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.`, - Long: `Experiments are the primary unit of organization in MLflow; all MLflow runs +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "experiments", + Short: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment.`, + Long: `Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking @@ -24,47 +29,64 @@ var Cmd = &cobra.Command{ Experiments are located in the workspace file tree. You manage experiments using the same tools you use to manage other workspace objects such as folders, notebooks, and libraries.`, - Annotations: map[string]string{ - "package": "ml", - }, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-experiment command -var createExperimentReq ml.CreateExperiment -var createExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(createExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createExperimentOverrides []func( + *cobra.Command, + *ml.CreateExperiment, +) + +func newCreateExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var createExperimentReq ml.CreateExperiment + var createExperimentJson flags.JsonFlag + // TODO: short flags - createExperimentCmd.Flags().Var(&createExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createExperimentCmd.Flags().StringVar(&createExperimentReq.ArtifactLocation, "artifact-location", createExperimentReq.ArtifactLocation, `Location where all artifacts for the experiment are stored.`) + cmd.Flags().StringVar(&createExperimentReq.ArtifactLocation, "artifact-location", createExperimentReq.ArtifactLocation, `Location where all artifacts for the experiment are stored.`) // TODO: array: tags -} - -var createExperimentCmd = &cobra.Command{ - Use: "create-experiment NAME", - Short: `Create experiment.`, - Long: `Create experiment. + cmd.Use = "create-experiment NAME" + cmd.Short = `Create experiment.` + cmd.Long = `Create experiment. Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that another experiment with the same name does not already exist and fails if another experiment with the same name already exists. 
- Throws RESOURCE_ALREADY_EXISTS if a experiment with the given name exists.`, + Throws RESOURCE_ALREADY_EXISTS if a experiment with the given name exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -82,48 +104,70 @@ var createExperimentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createExperimentOverrides { + fn(cmd, &createExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateExperiment()) + }) } // start create-run command -var createRunReq ml.CreateRun -var createRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(createRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createRunOverrides []func( + *cobra.Command, + *ml.CreateRun, +) + +func newCreateRun() *cobra.Command { + cmd := &cobra.Command{} + + var createRunReq ml.CreateRun + var createRunJson flags.JsonFlag + // TODO: short flags - createRunCmd.Flags().Var(&createRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createRunCmd.Flags().StringVar(&createRunReq.ExperimentId, "experiment-id", createRunReq.ExperimentId, `ID of the associated experiment.`) - createRunCmd.Flags().Int64Var(&createRunReq.StartTime, "start-time", createRunReq.StartTime, `Unix timestamp in milliseconds of when the run started.`) + cmd.Flags().StringVar(&createRunReq.ExperimentId, "experiment-id", createRunReq.ExperimentId, `ID of the associated experiment.`) + cmd.Flags().Int64Var(&createRunReq.StartTime, "start-time", createRunReq.StartTime, `Unix timestamp in milliseconds of when the run started.`) // TODO: array: tags - createRunCmd.Flags().StringVar(&createRunReq.UserId, "user-id", createRunReq.UserId, `ID of the user executing the run.`) + cmd.Flags().StringVar(&createRunReq.UserId, "user-id", createRunReq.UserId, `ID of the user executing the run.`) -} - -var createRunCmd = &cobra.Command{ - Use: "create-run", - Short: `Create a run.`, - Long: `Create a run. + cmd.Use = "create-run" + cmd.Short = `Create a run.` + cmd.Long = `Create a run. Creates a new run within an experiment. A run is usually a single execution of a machine learning or data ETL pipeline. 
MLflow uses runs to track the mlflowParam, mlflowMetric and mlflowRunTag associated with a single - execution.`, + execution.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -140,42 +184,64 @@ var createRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createRunOverrides { + fn(cmd, &createRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateRun()) + }) } // start delete-experiment command -var deleteExperimentReq ml.DeleteExperiment -var deleteExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteExperimentOverrides []func( + *cobra.Command, + *ml.DeleteExperiment, +) + +func newDeleteExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var deleteExperimentReq ml.DeleteExperiment + var deleteExperimentJson flags.JsonFlag + // TODO: short flags - deleteExperimentCmd.Flags().Var(&deleteExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteExperimentCmd = &cobra.Command{ - Use: "delete-experiment EXPERIMENT_ID", - Short: `Delete an experiment.`, - Long: `Delete an experiment. + cmd.Use = "delete-experiment EXPERIMENT_ID" + cmd.Short = `Delete an experiment.` + cmd.Long = `Delete an experiment. Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the experiment uses FileStore, artifacts associated with - experiment are also deleted.`, + experiment are also deleted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -193,40 +259,62 @@ var deleteExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteExperimentOverrides { + fn(cmd, &deleteExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteExperiment()) + }) } // start delete-run command -var deleteRunReq ml.DeleteRun -var deleteRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteRunOverrides []func( + *cobra.Command, + *ml.DeleteRun, +) + +func newDeleteRun() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunReq ml.DeleteRun + var deleteRunJson flags.JsonFlag + // TODO: short flags - deleteRunCmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteRunCmd = &cobra.Command{ - Use: "delete-run RUN_ID", - Short: `Delete a run.`, - Long: `Delete a run. + cmd.Use = "delete-run RUN_ID" + cmd.Short = `Delete a run.` + cmd.Long = `Delete a run. - Marks a run for deletion.`, + Marks a run for deletion.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -244,41 +332,63 @@ var deleteRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteRunOverrides { + fn(cmd, &deleteRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRun()) + }) } // start delete-tag command -var deleteTagReq ml.DeleteTag -var deleteTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteTagOverrides []func( + *cobra.Command, + *ml.DeleteTag, +) + +func newDeleteTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteTagReq ml.DeleteTag + var deleteTagJson flags.JsonFlag + // TODO: short flags - deleteTagCmd.Flags().Var(&deleteTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteTagCmd = &cobra.Command{ - Use: "delete-tag RUN_ID KEY", - Short: `Delete a tag.`, - Long: `Delete a tag. + cmd.Use = "delete-tag RUN_ID KEY" + cmd.Short = `Delete a tag.` + cmd.Long = `Delete a tag. Deletes a tag on a run. 
Tags are run metadata that can be updated during a run - and after a run completes.`, + and after a run completes.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -297,25 +407,45 @@ var deleteTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteTagOverrides { + fn(cmd, &deleteTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteTag()) + }) } // start get-by-name command -var getByNameReq ml.GetByNameRequest -func init() { - Cmd.AddCommand(getByNameCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getByNameOverrides []func( + *cobra.Command, + *ml.GetByNameRequest, +) + +func newGetByName() *cobra.Command { + cmd := &cobra.Command{} + + var getByNameReq ml.GetByNameRequest + // TODO: short flags -} - -var getByNameCmd = &cobra.Command{ - Use: "get-by-name EXPERIMENT_NAME", - Short: `Get metadata.`, - Long: `Get metadata. + cmd.Use = "get-by-name EXPERIMENT_NAME" + cmd.Short = `Get metadata.` + cmd.Long = `Get metadata. 
Gets metadata for an experiment. @@ -325,15 +455,17 @@ var getByNameCmd = &cobra.Command{ them. Throws RESOURCE_DOES_NOT_EXIST if no experiment with the specified name - exists.`, + exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -344,35 +476,57 @@ var getByNameCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getByNameOverrides { + fn(cmd, &getByNameReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetByName()) + }) } // start get-experiment command -var getExperimentReq ml.GetExperimentRequest -func init() { - Cmd.AddCommand(getExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getExperimentOverrides []func( + *cobra.Command, + *ml.GetExperimentRequest, +) + +func newGetExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var getExperimentReq ml.GetExperimentRequest + // TODO: short flags -} - -var getExperimentCmd = &cobra.Command{ - Use: "get-experiment EXPERIMENT_ID", - Short: `Get an experiment.`, - Long: `Get an experiment. 
+ cmd.Use = "get-experiment EXPERIMENT_ID" + cmd.Short = `Get an experiment.` + cmd.Long = `Get an experiment. - Gets metadata for an experiment. This method works on deleted experiments.`, + Gets metadata for an experiment. This method works on deleted experiments.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -383,40 +537,62 @@ var getExperimentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getExperimentOverrides { + fn(cmd, &getExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetExperiment()) + }) } // start get-history command -var getHistoryReq ml.GetHistoryRequest -func init() { - Cmd.AddCommand(getHistoryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getHistoryOverrides []func( + *cobra.Command, + *ml.GetHistoryRequest, +) + +func newGetHistory() *cobra.Command { + cmd := &cobra.Command{} + + var getHistoryReq ml.GetHistoryRequest + // TODO: short flags - getHistoryCmd.Flags().IntVar(&getHistoryReq.MaxResults, "max-results", getHistoryReq.MaxResults, `Maximum number of Metric records to return per paginated request.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.PageToken, "page-token", getHistoryReq.PageToken, `Token indicating the page of metric histories to fetch.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.RunId, "run-id", getHistoryReq.RunId, `ID of the run from which to fetch metric values.`) - getHistoryCmd.Flags().StringVar(&getHistoryReq.RunUuid, "run-uuid", getHistoryReq.RunUuid, `[Deprecated, use run_id instead] ID of the run from which to fetch metric values.`) + cmd.Flags().IntVar(&getHistoryReq.MaxResults, "max-results", getHistoryReq.MaxResults, `Maximum number of Metric records to return per paginated request.`) + cmd.Flags().StringVar(&getHistoryReq.PageToken, "page-token", getHistoryReq.PageToken, `Token indicating the page of metric histories to fetch.`) + cmd.Flags().StringVar(&getHistoryReq.RunId, "run-id", getHistoryReq.RunId, `ID of the run from which to fetch metric values.`) + cmd.Flags().StringVar(&getHistoryReq.RunUuid, "run-uuid", getHistoryReq.RunUuid, `[Deprecated, use run_id instead] ID of the run from which to fetch metric values.`) -} - -var getHistoryCmd = &cobra.Command{ - Use: "get-history METRIC_KEY", - Short: `Get history of a given metric within a run.`, - Long: `Get history of a given metric within a run. + cmd.Use = "get-history METRIC_KEY" + cmd.Short = `Get history of a given metric within a run.` + cmd.Long = `Get history of a given metric within a run. 
- Gets a list of all values for the specified metric for a given run.`, + Gets a list of all values for the specified metric for a given run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -427,42 +603,64 @@ var getHistoryCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getHistoryOverrides { + fn(cmd, &getHistoryReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetHistory()) + }) } // start get-run command -var getRunReq ml.GetRunRequest -func init() { - Cmd.AddCommand(getRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getRunOverrides []func( + *cobra.Command, + *ml.GetRunRequest, +) + +func newGetRun() *cobra.Command { + cmd := &cobra.Command{} + + var getRunReq ml.GetRunRequest + // TODO: short flags - getRunCmd.Flags().StringVar(&getRunReq.RunUuid, "run-uuid", getRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to fetch.`) + cmd.Flags().StringVar(&getRunReq.RunUuid, "run-uuid", getRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to fetch.`) -} - -var getRunCmd = &cobra.Command{ - Use: "get-run RUN_ID", - Short: `Get a run.`, - Long: `Get a run. + cmd.Use = "get-run RUN_ID" + cmd.Short = `Get a run.` + cmd.Long = `Get a run. Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the same key are logged for a run, return only the value with the latest timestamp. If there are multiple values with the latest timestamp, return the maximum of - these values.`, + these values.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -473,46 +671,68 @@ var getRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getRunOverrides { + fn(cmd, &getRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRun()) + }) } // start list-artifacts command -var listArtifactsReq ml.ListArtifactsRequest -var listArtifactsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listArtifactsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listArtifactsOverrides []func( + *cobra.Command, + *ml.ListArtifactsRequest, +) + +func newListArtifacts() *cobra.Command { + cmd := &cobra.Command{} + + var listArtifactsReq ml.ListArtifactsRequest + var listArtifactsJson flags.JsonFlag + // TODO: short flags - listArtifactsCmd.Flags().Var(&listArtifactsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listArtifactsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.PageToken, "page-token", listArtifactsReq.PageToken, `Token indicating the page of artifact results to fetch.`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.Path, "path", listArtifactsReq.Path, `Filter artifacts matching this path (a relative path from the root artifact directory).`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.RunId, "run-id", listArtifactsReq.RunId, `ID of the run whose artifacts to list.`) - listArtifactsCmd.Flags().StringVar(&listArtifactsReq.RunUuid, "run-uuid", listArtifactsReq.RunUuid, `[Deprecated, use run_id instead] ID of the run whose artifacts to list.`) + cmd.Flags().StringVar(&listArtifactsReq.PageToken, "page-token", listArtifactsReq.PageToken, `Token indicating the page of artifact results to fetch.`) + cmd.Flags().StringVar(&listArtifactsReq.Path, "path", listArtifactsReq.Path, `Filter artifacts matching this path (a relative path from the 
root artifact directory).`) + cmd.Flags().StringVar(&listArtifactsReq.RunId, "run-id", listArtifactsReq.RunId, `ID of the run whose artifacts to list.`) + cmd.Flags().StringVar(&listArtifactsReq.RunUuid, "run-uuid", listArtifactsReq.RunUuid, `[Deprecated, use run_id instead] ID of the run whose artifacts to list.`) -} - -var listArtifactsCmd = &cobra.Command{ - Use: "list-artifacts", - Short: `Get all artifacts.`, - Long: `Get all artifacts. + cmd.Use = "list-artifacts" + cmd.Short = `Get all artifacts.` + cmd.Long = `Get all artifacts. List artifacts for a run. Takes an optional artifact_path prefix. If it is - specified, the response contains only artifacts with the specified prefix.",`, + specified, the response contains only artifacts with the specified prefix.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -529,44 +749,66 @@ var listArtifactsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listArtifactsOverrides { + fn(cmd, &listArtifactsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListArtifacts()) + }) } // start list-experiments command -var listExperimentsReq ml.ListExperimentsRequest -var listExperimentsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listExperimentsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listExperimentsOverrides []func( + *cobra.Command, + *ml.ListExperimentsRequest, +) + +func newListExperiments() *cobra.Command { + cmd := &cobra.Command{} + + var listExperimentsReq ml.ListExperimentsRequest + var listExperimentsJson flags.JsonFlag + // TODO: short flags - listExperimentsCmd.Flags().Var(&listExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listExperimentsCmd.Flags().IntVar(&listExperimentsReq.MaxResults, "max-results", listExperimentsReq.MaxResults, `Maximum number of experiments desired.`) - listExperimentsCmd.Flags().StringVar(&listExperimentsReq.PageToken, "page-token", listExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) - listExperimentsCmd.Flags().StringVar(&listExperimentsReq.ViewType, "view-type", listExperimentsReq.ViewType, `Qualifier for type of experiments to be returned.`) + cmd.Flags().IntVar(&listExperimentsReq.MaxResults, "max-results", listExperimentsReq.MaxResults, `Maximum number of experiments desired.`) + cmd.Flags().StringVar(&listExperimentsReq.PageToken, "page-token", listExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) + cmd.Flags().StringVar(&listExperimentsReq.ViewType, "view-type", listExperimentsReq.ViewType, `Qualifier for type of 
experiments to be returned.`) -} - -var listExperimentsCmd = &cobra.Command{ - Use: "list-experiments", - Short: `List experiments.`, - Long: `List experiments. + cmd.Use = "list-experiments" + cmd.Short = `List experiments.` + cmd.Long = `List experiments. - Gets a list of all experiments.`, + Gets a list of all experiments.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -583,32 +825,52 @@ var listExperimentsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listExperimentsOverrides { + fn(cmd, &listExperimentsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListExperiments()) + }) } // start log-batch command -var logBatchReq ml.LogBatch -var logBatchJson flags.JsonFlag -func init() { - Cmd.AddCommand(logBatchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logBatchOverrides []func( + *cobra.Command, + *ml.LogBatch, +) + +func newLogBatch() *cobra.Command { + cmd := &cobra.Command{} + + var logBatchReq ml.LogBatch + var logBatchJson flags.JsonFlag + // TODO: short flags - logBatchCmd.Flags().Var(&logBatchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logBatchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: metrics // TODO: array: params - logBatchCmd.Flags().StringVar(&logBatchReq.RunId, "run-id", logBatchReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logBatchReq.RunId, "run-id", logBatchReq.RunId, `ID of the run to log under.`) // TODO: array: tags -} - -var logBatchCmd = &cobra.Command{ - Use: "log-batch", - Short: `Log a batch.`, - Long: `Log a batch. + cmd.Use = "log-batch" + cmd.Short = `Log a batch.` + cmd.Long = `Log a batch. Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server will respond with an error (non-200 status code). 
@@ -646,18 +908,20 @@ var logBatchCmd = &cobra.Command{ The following limits also apply to metric, param, and tag keys and values: * Metric keys, param keys, and tag keys can be up to 250 characters in length - * Parameter and tag values can be up to 250 characters in length`, + * Parameter and tag values can be up to 250 characters in length` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -674,44 +938,66 @@ var logBatchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logBatchOverrides { + fn(cmd, &logBatchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogBatch()) + }) } // start log-inputs command -var logInputsReq ml.LogInputs -var logInputsJson flags.JsonFlag -func init() { - Cmd.AddCommand(logInputsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logInputsOverrides []func( + *cobra.Command, + *ml.LogInputs, +) + +func newLogInputs() *cobra.Command { + cmd := &cobra.Command{} + + var logInputsReq ml.LogInputs + var logInputsJson flags.JsonFlag + // TODO: short flags - logInputsCmd.Flags().Var(&logInputsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logInputsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: datasets - logInputsCmd.Flags().StringVar(&logInputsReq.RunId, "run-id", logInputsReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logInputsReq.RunId, "run-id", logInputsReq.RunId, `ID of the run to log under.`) -} - -var logInputsCmd = &cobra.Command{ - Use: "log-inputs", - Short: `Log inputs to a run.`, - Long: `Log inputs to a run. + cmd.Use = "log-inputs" + cmd.Short = `Log inputs to a run.` + cmd.Long = `Log inputs to a run. **NOTE:** Experimental: This API may change or be removed in a future release - without warning.`, + without warning.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -728,46 +1014,68 @@ var logInputsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range logInputsOverrides { + fn(cmd, &logInputsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogInputs()) + }) } // start log-metric command -var logMetricReq ml.LogMetric -var logMetricJson flags.JsonFlag -func init() { - Cmd.AddCommand(logMetricCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var logMetricOverrides []func( + *cobra.Command, + *ml.LogMetric, +) + +func newLogMetric() *cobra.Command { + cmd := &cobra.Command{} + + var logMetricReq ml.LogMetric + var logMetricJson flags.JsonFlag + // TODO: short flags - logMetricCmd.Flags().Var(&logMetricJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logMetricJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logMetricCmd.Flags().StringVar(&logMetricReq.RunId, "run-id", logMetricReq.RunId, `ID of the run under which to log the metric.`) - logMetricCmd.Flags().StringVar(&logMetricReq.RunUuid, "run-uuid", logMetricReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the metric.`) - logMetricCmd.Flags().Int64Var(&logMetricReq.Step, "step", logMetricReq.Step, `Step at which to log the metric.`) + cmd.Flags().StringVar(&logMetricReq.RunId, "run-id", logMetricReq.RunId, `ID of the run under which to log the metric.`) + cmd.Flags().StringVar(&logMetricReq.RunUuid, "run-uuid", logMetricReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the metric.`) + cmd.Flags().Int64Var(&logMetricReq.Step, "step", logMetricReq.Step, `Step at which to log the metric.`) -} - -var logMetricCmd = &cobra.Command{ - Use: "log-metric KEY VALUE TIMESTAMP", - Short: `Log a metric.`, - Long: `Log a metric. 
+ cmd.Use = "log-metric KEY VALUE TIMESTAMP" + cmd.Short = `Log a metric.` + cmd.Long = `Log a metric. Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated timestamp. Examples include the various metrics that - represent ML model accuracy. A metric can be logged multiple times.`, + represent ML model accuracy. A metric can be logged multiple times.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -793,44 +1101,66 @@ var logMetricCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logMetricOverrides { + fn(cmd, &logMetricReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogMetric()) + }) } // start log-model command -var logModelReq ml.LogModel -var logModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(logModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logModelOverrides []func( + *cobra.Command, + *ml.LogModel, +) + +func newLogModel() *cobra.Command { + cmd := &cobra.Command{} + + var logModelReq ml.LogModel + var logModelJson flags.JsonFlag + // TODO: short flags - logModelCmd.Flags().Var(&logModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logModelCmd.Flags().StringVar(&logModelReq.ModelJson, "model-json", logModelReq.ModelJson, `MLmodel file in json format.`) - logModelCmd.Flags().StringVar(&logModelReq.RunId, "run-id", logModelReq.RunId, `ID of the run to log under.`) + cmd.Flags().StringVar(&logModelReq.ModelJson, "model-json", logModelReq.ModelJson, `MLmodel file in json format.`) + cmd.Flags().StringVar(&logModelReq.RunId, "run-id", logModelReq.RunId, `ID of the run to log under.`) -} - -var logModelCmd = &cobra.Command{ - Use: "log-model", - Short: `Log a model.`, - Long: `Log a model. + cmd.Use = "log-model" + cmd.Short = `Log a model.` + cmd.Long = `Log a model. **NOTE:** Experimental: This API may change or be removed in a future release - without warning.`, + without warning.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -847,46 +1177,68 @@ var logModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logModelOverrides { + fn(cmd, &logModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogModel()) + }) } // start log-param command -var logParamReq ml.LogParam -var logParamJson flags.JsonFlag -func init() { - Cmd.AddCommand(logParamCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var logParamOverrides []func( + *cobra.Command, + *ml.LogParam, +) + +func newLogParam() *cobra.Command { + cmd := &cobra.Command{} + + var logParamReq ml.LogParam + var logParamJson flags.JsonFlag + // TODO: short flags - logParamCmd.Flags().Var(&logParamJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&logParamJson, "json", `either inline JSON string or @path/to/file.json with request body`) - logParamCmd.Flags().StringVar(&logParamReq.RunId, "run-id", logParamReq.RunId, `ID of the run under which to log the param.`) - logParamCmd.Flags().StringVar(&logParamReq.RunUuid, "run-uuid", logParamReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the param.`) + cmd.Flags().StringVar(&logParamReq.RunId, "run-id", logParamReq.RunId, `ID of the run under which to log the param.`) + cmd.Flags().StringVar(&logParamReq.RunUuid, "run-uuid", logParamReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the param.`) -} - -var logParamCmd = &cobra.Command{ - Use: "log-param KEY VALUE", - Short: `Log a param.`, - Long: `Log a param. + cmd.Use = "log-param KEY VALUE" + cmd.Short = `Log a param.` + cmd.Long = `Log a param. Logs a param used for a run. 
A param is a key-value pair (string key, string value). Examples include hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A param can be logged only - once for a run.`, + once for a run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -905,45 +1257,67 @@ var logParamCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range logParamOverrides { + fn(cmd, &logParamReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogParam()) + }) } // start restore-experiment command -var restoreExperimentReq ml.RestoreExperiment -var restoreExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(restoreExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var restoreExperimentOverrides []func( + *cobra.Command, + *ml.RestoreExperiment, +) + +func newRestoreExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var restoreExperimentReq ml.RestoreExperiment + var restoreExperimentJson flags.JsonFlag + // TODO: short flags - restoreExperimentCmd.Flags().Var(&restoreExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restoreExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var restoreExperimentCmd = &cobra.Command{ - Use: "restore-experiment EXPERIMENT_ID", - Short: `Restores an experiment.`, - Long: `Restores an experiment. + cmd.Use = "restore-experiment EXPERIMENT_ID" + cmd.Short = `Restores an experiment.` + cmd.Long = `Restores an experiment. Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics, params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are also restored. Throws RESOURCE_DOES_NOT_EXIST if experiment was never created or was - permanently deleted.`, + permanently deleted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -961,40 +1335,62 @@ var restoreExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreExperimentOverrides { + fn(cmd, &restoreExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreExperiment()) + }) } // start restore-run command -var restoreRunReq ml.RestoreRun -var restoreRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(restoreRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreRunOverrides []func( + *cobra.Command, + *ml.RestoreRun, +) + +func newRestoreRun() *cobra.Command { + cmd := &cobra.Command{} + + var restoreRunReq ml.RestoreRun + var restoreRunJson flags.JsonFlag + // TODO: short flags - restoreRunCmd.Flags().Var(&restoreRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&restoreRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var restoreRunCmd = &cobra.Command{ - Use: "restore-run RUN_ID", - Short: `Restore a run.`, - Long: `Restore a run. + cmd.Use = "restore-run RUN_ID" + cmd.Short = `Restore a run.` + cmd.Long = `Restore a run. 
- Restores a deleted run.`, + Restores a deleted run.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1012,46 +1408,68 @@ var restoreRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreRunOverrides { + fn(cmd, &restoreRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreRun()) + }) } // start search-experiments command -var searchExperimentsReq ml.SearchExperiments -var searchExperimentsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchExperimentsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var searchExperimentsOverrides []func( + *cobra.Command, + *ml.SearchExperiments, +) + +func newSearchExperiments() *cobra.Command { + cmd := &cobra.Command{} + + var searchExperimentsReq ml.SearchExperiments + var searchExperimentsJson flags.JsonFlag + // TODO: short flags - searchExperimentsCmd.Flags().Var(&searchExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchExperimentsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchExperimentsCmd.Flags().StringVar(&searchExperimentsReq.Filter, "filter", searchExperimentsReq.Filter, `String representing a SQL filter condition (e.g.`) - searchExperimentsCmd.Flags().Int64Var(&searchExperimentsReq.MaxResults, "max-results", searchExperimentsReq.MaxResults, `Maximum number of experiments desired.`) + cmd.Flags().StringVar(&searchExperimentsReq.Filter, "filter", searchExperimentsReq.Filter, `String representing a SQL filter condition (e.g.`) + cmd.Flags().Int64Var(&searchExperimentsReq.MaxResults, "max-results", searchExperimentsReq.MaxResults, `Maximum number of experiments desired.`) // TODO: array: order_by - searchExperimentsCmd.Flags().StringVar(&searchExperimentsReq.PageToken, "page-token", searchExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) - searchExperimentsCmd.Flags().Var(&searchExperimentsReq.ViewType, "view-type", `Qualifier for type of experiments to be returned.`) + cmd.Flags().StringVar(&searchExperimentsReq.PageToken, "page-token", searchExperimentsReq.PageToken, `Token indicating the page of experiments to fetch.`) + cmd.Flags().Var(&searchExperimentsReq.ViewType, "view-type", `Qualifier for type of experiments to be returned.`) -} - -var searchExperimentsCmd = &cobra.Command{ - Use: "search-experiments", - Short: `Search experiments.`, - Long: `Search experiments. + cmd.Use = "search-experiments" + cmd.Short = `Search experiments.` + cmd.Long = `Search experiments. 
- Searches for experiments that satisfy specified search criteria.`, + Searches for experiments that satisfy specified search criteria.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1068,49 +1486,71 @@ var searchExperimentsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchExperimentsOverrides { + fn(cmd, &searchExperimentsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchExperiments()) + }) } // start search-runs command -var searchRunsReq ml.SearchRuns -var searchRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var searchRunsOverrides []func( + *cobra.Command, + *ml.SearchRuns, +) + +func newSearchRuns() *cobra.Command { + cmd := &cobra.Command{} + + var searchRunsReq ml.SearchRuns + var searchRunsJson flags.JsonFlag + // TODO: short flags - searchRunsCmd.Flags().Var(&searchRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: experiment_ids - searchRunsCmd.Flags().StringVar(&searchRunsReq.Filter, "filter", searchRunsReq.Filter, `A filter expression over params, metrics, and tags, that allows returning a subset of runs.`) - searchRunsCmd.Flags().IntVar(&searchRunsReq.MaxResults, "max-results", searchRunsReq.MaxResults, `Maximum number of runs desired.`) + cmd.Flags().StringVar(&searchRunsReq.Filter, "filter", searchRunsReq.Filter, `A filter expression over params, metrics, and tags, that allows returning a subset of runs.`) + cmd.Flags().IntVar(&searchRunsReq.MaxResults, "max-results", searchRunsReq.MaxResults, `Maximum number of runs desired.`) // TODO: array: order_by - searchRunsCmd.Flags().StringVar(&searchRunsReq.PageToken, "page-token", searchRunsReq.PageToken, `Token for the current page of runs.`) - searchRunsCmd.Flags().Var(&searchRunsReq.RunViewType, "run-view-type", `Whether to display only active, only deleted, or all runs.`) + cmd.Flags().StringVar(&searchRunsReq.PageToken, "page-token", searchRunsReq.PageToken, `Token for the current page of runs.`) + cmd.Flags().Var(&searchRunsReq.RunViewType, "run-view-type", `Whether to display only active, only deleted, or all runs.`) -} - -var searchRunsCmd = &cobra.Command{ - Use: "search-runs", - Short: `Search for runs.`, - Long: `Search for runs. + cmd.Use = "search-runs" + cmd.Short = `Search for runs.` + cmd.Long = `Search for runs. Searches for runs that satisfy expressions. 
- Search expressions can use mlflowMetric and mlflowParam keys.",`, + Search expressions can use mlflowMetric and mlflowParam keys.",` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1127,40 +1567,62 @@ var searchRunsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchRunsOverrides { + fn(cmd, &searchRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchRuns()) + }) } // start set-experiment-tag command -var setExperimentTagReq ml.SetExperimentTag -var setExperimentTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setExperimentTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setExperimentTagOverrides []func( + *cobra.Command, + *ml.SetExperimentTag, +) + +func newSetExperimentTag() *cobra.Command { + cmd := &cobra.Command{} + + var setExperimentTagReq ml.SetExperimentTag + var setExperimentTagJson flags.JsonFlag + // TODO: short flags - setExperimentTagCmd.Flags().Var(&setExperimentTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setExperimentTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setExperimentTagCmd = &cobra.Command{ - Use: "set-experiment-tag EXPERIMENT_ID KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-experiment-tag EXPERIMENT_ID KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. - Sets a tag on an experiment. Experiment tags are metadata that can be updated.`, + Sets a tag on an experiment. Experiment tags are metadata that can be updated.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1180,44 +1642,66 @@ var setExperimentTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setExperimentTagOverrides { + fn(cmd, &setExperimentTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetExperimentTag()) + }) } // start set-tag command -var setTagReq ml.SetTag -var setTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setTagOverrides []func( + *cobra.Command, + *ml.SetTag, +) + +func newSetTag() *cobra.Command { + cmd := &cobra.Command{} + + var setTagReq ml.SetTag + var setTagJson flags.JsonFlag + // TODO: short flags - setTagCmd.Flags().Var(&setTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) - setTagCmd.Flags().StringVar(&setTagReq.RunId, "run-id", setTagReq.RunId, `ID of the run under which to log the tag.`) - setTagCmd.Flags().StringVar(&setTagReq.RunUuid, "run-uuid", setTagReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the tag.`) + cmd.Flags().StringVar(&setTagReq.RunId, "run-id", setTagReq.RunId, `ID of the run under which to log the tag.`) + cmd.Flags().StringVar(&setTagReq.RunUuid, "run-uuid", setTagReq.RunUuid, `[Deprecated, use run_id instead] ID of the run under which to log the tag.`) -} - -var setTagCmd = &cobra.Command{ - Use: "set-tag KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-tag KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. Sets a tag on a run. 
Tags are run metadata that can be updated during a run - and after a run completes.`, + and after a run completes.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1236,42 +1720,64 @@ var setTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setTagOverrides { + fn(cmd, &setTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetTag()) + }) } // start update-experiment command -var updateExperimentReq ml.UpdateExperiment -var updateExperimentJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateExperimentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateExperimentOverrides []func( + *cobra.Command, + *ml.UpdateExperiment, +) + +func newUpdateExperiment() *cobra.Command { + cmd := &cobra.Command{} + + var updateExperimentReq ml.UpdateExperiment + var updateExperimentJson flags.JsonFlag + // TODO: short flags - updateExperimentCmd.Flags().Var(&updateExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateExperimentJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateExperimentCmd.Flags().StringVar(&updateExperimentReq.NewName, "new-name", updateExperimentReq.NewName, `If provided, the experiment's name is changed to the new name.`) + cmd.Flags().StringVar(&updateExperimentReq.NewName, "new-name", updateExperimentReq.NewName, `If provided, the experiment's name is changed to the new name.`) -} - -var updateExperimentCmd = &cobra.Command{ - Use: "update-experiment EXPERIMENT_ID", - Short: `Update an experiment.`, - Long: `Update an experiment. + cmd.Use = "update-experiment EXPERIMENT_ID" + cmd.Short = `Update an experiment.` + cmd.Long = `Update an experiment. - Updates experiment metadata.`, + Updates experiment metadata.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1289,45 +1795,67 @@ var updateExperimentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateExperimentOverrides { + fn(cmd, &updateExperimentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateExperiment()) + }) } // start update-run command -var updateRunReq ml.UpdateRun -var updateRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateRunOverrides []func( + *cobra.Command, + *ml.UpdateRun, +) + +func newUpdateRun() *cobra.Command { + cmd := &cobra.Command{} + + var updateRunReq ml.UpdateRun + var updateRunJson flags.JsonFlag + // TODO: short flags - updateRunCmd.Flags().Var(&updateRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateRunCmd.Flags().Int64Var(&updateRunReq.EndTime, "end-time", updateRunReq.EndTime, `Unix timestamp in milliseconds of when the run ended.`) - updateRunCmd.Flags().StringVar(&updateRunReq.RunId, "run-id", updateRunReq.RunId, `ID of the run to update.`) - updateRunCmd.Flags().StringVar(&updateRunReq.RunUuid, "run-uuid", updateRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to update.`) - updateRunCmd.Flags().Var(&updateRunReq.Status, "status", `Updated status of the run.`) + cmd.Flags().Int64Var(&updateRunReq.EndTime, "end-time", updateRunReq.EndTime, `Unix timestamp in milliseconds of when the run ended.`) + cmd.Flags().StringVar(&updateRunReq.RunId, "run-id", updateRunReq.RunId, `ID of the run to update.`) + cmd.Flags().StringVar(&updateRunReq.RunUuid, "run-uuid", 
updateRunReq.RunUuid, `[Deprecated, use run_id instead] ID of the run to update.`) + cmd.Flags().Var(&updateRunReq.Status, "status", `Updated status of the run.`) -} - -var updateRunCmd = &cobra.Command{ - Use: "update-run", - Short: `Update a run.`, - Long: `Update a run. + cmd.Use = "update-run" + cmd.Short = `Update a run.` + cmd.Long = `Update a run. - Updates run metadata.`, + Updates run metadata.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1344,10 +1872,24 @@ var updateRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateRunOverrides { + fn(cmd, &updateRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateRun()) + }) } // end service Experiments diff --git a/cmd/workspace/external-locations/external-locations.go b/cmd/workspace/external-locations/external-locations.go index a739c931c..db6153df0 100755 --- a/cmd/workspace/external-locations/external-locations.go +++ b/cmd/workspace/external-locations/external-locations.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "external-locations", - Short: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.`, - Long: `An external location is an object that combines a cloud storage path with a +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "external-locations", + Short: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path.`, + Long: `An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path. Each external location is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not @@ -26,45 +31,62 @@ var Cmd = &cobra.Command{ To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateExternalLocation -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateExternalLocation, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateExternalLocation + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) - createCmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) + cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME URL CREDENTIAL_NAME", - Short: `Create an external location.`, - Long: `Create an external location. 
+ cmd.Use = "create NAME URL CREDENTIAL_NAME" + cmd.Short = `Create an external location.` + cmd.Long = `Create an external location. Creates a new external location entry in the metastore. The caller must be a metastore admin or have the **CREATE_EXTERNAL_LOCATION** privilege on both the - metastore and the associated storage credential.`, + metastore and the associated storage credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -84,38 +106,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteExternalLocationRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteExternalLocationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteExternalLocationRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external tables or mounts.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external tables or mounts.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete an external location.`, - Long: `Delete an external location. + cmd.Use = "delete NAME" + cmd.Short = `Delete an external location.` + cmd.Long = `Delete an external location. Deletes the specified external location from the metastore. The caller must be - the owner of the external location.`, + the owner of the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -126,37 +170,59 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetExternalLocationRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetExternalLocationRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetExternalLocationRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get an external location.`, - Long: `Get an external location. + cmd.Use = "get NAME" + cmd.Short = `Get an external location.` + cmd.Long = `Get an external location. Gets an external location from the metastore. The caller must be either a metastore admin, the owner of the external location, or a user that has some - privilege on the external location.`, + privilege on the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -167,32 +233,50 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List external locations.`, - Long: `List external locations. + cmd.Use = "list" + cmd.Short = `List external locations.` + cmd.Long = `List external locations. Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller must be a metastore admin, the owner of the external location, or a user that has some privilege on the external location. There is - no guarantee of a specific ordering of the elements in the array.`, + no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.ExternalLocations.ListAll(ctx) @@ -200,50 +284,72 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateExternalLocation -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateExternalLocation, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateExternalLocation + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) - updateCmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the external location.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) - updateCmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) - updateCmd.Flags().StringVar(&updateReq.Url, "url", updateReq.Url, `Path URL of the 
external location.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) + cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the external location.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) + cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Indicates whether the external location is read-only.`) + cmd.Flags().StringVar(&updateReq.Url, "url", updateReq.Url, `Path URL of the external location.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update an external location.`, - Long: `Update an external location. + cmd.Use = "update NAME" + cmd.Short = `Update an external location.` + cmd.Long = `Update an external location. Updates an external location in the metastore. The caller must be the owner of the external location, or be a metastore admin. 
In the second case, the admin - can only update the name of the external location.`, + can only update the name of the external location.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -261,10 +367,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service ExternalLocations diff --git a/cmd/workspace/external-locations/overrides.go b/cmd/workspace/external-locations/overrides.go index 7efd193d9..a271e5f65 100644 --- a/cmd/workspace/external-locations/overrides.go +++ b/cmd/workspace/external-locations/overrides.go @@ -1,10 +1,17 @@ package external_locations -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Name"}} {{header "Credential"}} {{header "URL"}} {{range .}}{{.Name|green}} {{.CredentialName|cyan}} {{.Url}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/functions/functions.go b/cmd/workspace/functions/functions.go index 9ce3f2faf..8b4a50ec5 100755 --- a/cmd/workspace/functions/functions.go +++ b/cmd/workspace/functions/functions.go @@ -12,51 +12,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "functions", - Short: `Functions implement User-Defined Functions (UDFs) in Unity Catalog.`, - Long: `Functions implement User-Defined Functions (UDFs) in Unity Catalog. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "functions", + Short: `Functions implement User-Defined Functions (UDFs) in Unity Catalog.`, + Long: `Functions implement User-Defined Functions (UDFs) in Unity Catalog. 
The function implementation can be any SQL expression or Query, and it can be invoked wherever a table reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it can be referenced with the form __catalog_name__.__schema_name__.__function_name__.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateFunction -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateFunction, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateFunction + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) - createCmd.Flags().StringVar(&createReq.ExternalLanguage, "external-language", createReq.ExternalLanguage, `External function language.`) - createCmd.Flags().StringVar(&createReq.ExternalName, "external-name", createReq.ExternalName, `External function name.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.ExternalLanguage, "external-language", createReq.ExternalLanguage, `External function language.`) + 
cmd.Flags().StringVar(&createReq.ExternalName, "external-name", createReq.ExternalName, `External function name.`) // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.SqlPath, "sql-path", createReq.SqlPath, `List of schemes whose objects can be referenced without qualification.`) + cmd.Flags().StringVar(&createReq.SqlPath, "sql-path", createReq.SqlPath, `List of schemes whose objects can be referenced without qualification.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a function.`, - Long: `Create a function. + cmd.Use = "create" + cmd.Short = `Create a function.` + cmd.Long = `Create a function. Creates a new function The user must have the following permissions in order for the function to be created: - **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** - and **CREATE_FUNCTION** on the function's parent schema`, + and **CREATE_FUNCTION** on the function's parent schema` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,58 +95,67 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteFunctionRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteFunctionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteFunctionRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the function is notempty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the function is notempty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a function.`, - Long: `Delete a function. + cmd.Use = "delete NAME" + cmd.Short = `Delete a function.` + cmd.Long = `Delete a function. Deletes the function that matches the supplied name. 
For the deletion to succeed, the user must satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and have both the **USE_CATALOG** privilege on - its parent catalog and the **USE_SCHEMA** privilege on its parent schema`, + its parent catalog and the **USE_SCHEMA** privilege on its parent schema` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." - names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - } deleteReq.Name = args[0] err = w.Functions.Delete(ctx, deleteReq) @@ -133,25 +163,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetFunctionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetFunctionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetFunctionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a function.`, - Long: `Get a function. + cmd.Use = "get NAME" + cmd.Short = `Get a function.` + cmd.Long = `Get a function. Gets a function from within a parent catalog and schema. 
For the fetch to succeed, the user must satisfy one of the following requirements: - Is a @@ -159,31 +209,20 @@ var getCmd = &cobra.Command{ **USE_CATALOG** privilege on the function's parent catalog and be the owner of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the **USE_SCHEMA** privilege on the function's parent schema, and the - **EXECUTE** privilege on the function itself`, + **EXECUTE** privilege on the function itself` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." - names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - } getReq.Name = args[0] response, err := w.Functions.Get(ctx, getReq) @@ -191,25 +230,45 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListFunctionsRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListFunctionsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListFunctionsRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List functions.`, - Long: `List functions. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List functions.` + cmd.Long = `List functions. List functions within the specified parent catalog and schema. If the user is a metastore admin, all functions are returned in the output list. Otherwise, @@ -217,15 +276,17 @@ var listCmd = &cobra.Command{ **USE_SCHEMA** privilege on the schema, and the output list contains only functions for which either the user has the **EXECUTE** privilege or the user is the owner. 
There is no guarantee of a specific ordering of the elements in - the array.`, + the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -237,27 +298,47 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateFunction -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateFunction, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateFunction + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of function.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of function.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a function.`, - Long: `Update a function. 
+ cmd.Use = "update NAME" + cmd.Short = `Update a function.` + cmd.Long = `Update a function. Updates the function that matches the supplied name. Only the owner of the function can be updated. If the user is not a metastore admin, the user must @@ -266,31 +347,20 @@ var updateCmd = &cobra.Command{ function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the - function's parent schema.`, + function's parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." - names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") - } updateReq.Name = args[0] response, err := w.Functions.Update(ctx, updateReq) @@ -298,10 +368,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Functions diff --git a/cmd/workspace/git-credentials/git-credentials.go b/cmd/workspace/git-credentials/git-credentials.go index f75ed83e0..7e61b4c4f 100755 --- a/cmd/workspace/git-credentials/git-credentials.go +++ b/cmd/workspace/git-credentials/git-credentials.go @@ -12,54 +12,76 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "git-credentials", - Short: `Registers personal access token for Databricks to do operations on behalf of the user.`, - Long: `Registers personal access token for Databricks to do operations on behalf of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "git-credentials", + Short: `Registers personal access token for Databricks to do operations on behalf of the user.`, + Long: `Registers personal access token for Databricks to do operations on behalf of the user. See [more info]. [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq workspace.CreateCredentials -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *workspace.CreateCredentials, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq workspace.CreateCredentials + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.GitUsername, "git-username", createReq.GitUsername, `Git username.`) - createCmd.Flags().StringVar(&createReq.PersonalAccessToken, "personal-access-token", createReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) + cmd.Flags().StringVar(&createReq.GitUsername, "git-username", createReq.GitUsername, `Git username.`) + cmd.Flags().StringVar(&createReq.PersonalAccessToken, "personal-access-token", createReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) -} - -var createCmd = &cobra.Command{ - Use: "create GIT_PROVIDER", - Short: `Create a credential entry.`, - Long: `Create a credential entry. + cmd.Use = "create GIT_PROVIDER" + cmd.Short = `Create a credential entry.` + cmd.Long = `Create a credential entry. Creates a Git credential entry for the user. Only one Git credential per user is supported, so any attempts to create credentials if an entry already exists will fail. 
Use the PATCH endpoint to update existing credentials, or the - DELETE endpoint to delete existing credentials.`, + DELETE endpoint to delete existing credentials.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -77,51 +99,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq workspace.DeleteGitCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.DeleteGitCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.DeleteGitCredentialRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete CREDENTIAL_ID", - Short: `Delete a credential.`, - Long: `Delete a credential. 
+ cmd.Use = "delete CREDENTIAL_ID" + cmd.Short = `Delete a credential.` + cmd.Long = `Delete a credential. - Deletes the specified Git credential.`, + Deletes the specified Git credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." - names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding credential to access") - } _, err = fmt.Sscan(args[0], &deleteReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) @@ -132,51 +163,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq workspace.GetGitCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *workspace.GetGitCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq workspace.GetGitCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get CREDENTIAL_ID", - Short: `Get a credential entry.`, - Long: `Get a credential entry. + cmd.Use = "get CREDENTIAL_ID" + cmd.Short = `Get a credential entry.` + cmd.Long = `Get a credential entry. - Gets the Git credential with the specified credential ID.`, + Gets the Git credential with the specified credential ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." - names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding credential to access") - } _, err = fmt.Sscan(args[0], &getReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) @@ -187,30 +227,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get Git credentials.`, - Long: `Get Git credentials. + cmd.Use = "list" + cmd.Short = `Get Git credentials.` + cmd.Long = `Get Git credentials. Lists the calling user's Git credentials. 
One credential per user is - supported.`, + supported.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.GitCredentials.ListAll(ctx) @@ -218,55 +276,64 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq workspace.UpdateCredentials -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *workspace.UpdateCredentials, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq workspace.UpdateCredentials + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`) - updateCmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `Git username.`) - updateCmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) + cmd.Flags().StringVar(&updateReq.GitProvider, "git-provider", updateReq.GitProvider, `Git provider.`) + cmd.Flags().StringVar(&updateReq.GitUsername, "git-username", updateReq.GitUsername, `Git username.`) + cmd.Flags().StringVar(&updateReq.PersonalAccessToken, "personal-access-token", updateReq.PersonalAccessToken, `The personal access token used to authenticate to the corresponding Git provider.`) -} - -var updateCmd = &cobra.Command{ - Use: "update CREDENTIAL_ID", - Short: `Update a credential.`, - Long: `Update a credential. + cmd.Use = "update CREDENTIAL_ID" + cmd.Short = `Update a credential.` + cmd.Long = `Update a credential. - Updates the specified Git credential.`, + Updates the specified Git credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." 
- names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding credential to access") - } _, err = fmt.Sscan(args[0], &updateReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) @@ -277,10 +344,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service GitCredentials diff --git a/cmd/workspace/global-init-scripts/global-init-scripts.go b/cmd/workspace/global-init-scripts/global-init-scripts.go index b63338f6c..e7d734f06 100755 --- a/cmd/workspace/global-init-scripts/global-init-scripts.go +++ b/cmd/workspace/global-init-scripts/global-init-scripts.go @@ -3,8 +3,6 @@ package global_init_scripts import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "global-init-scripts", - Short: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.`, - Long: `The Global Init Scripts API enables Workspace administrators to configure +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "global-init-scripts", + Short: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace.`, + Long: `The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace. These scripts run on every node in every cluster in the workspace. @@ -24,42 +27,59 @@ var Cmd = &cobra.Command{ script returns with a bad exit code, the Apache Spark container fails to launch and init scripts with later position are skipped. 
If enough containers fail, the entire cluster fails with a GLOBAL_INIT_SCRIPT_FAILURE error code.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.GlobalInitScriptCreateRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *compute.GlobalInitScriptCreateRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.GlobalInitScriptCreateRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Enabled, "enabled", createReq.Enabled, `Specifies whether the script is enabled.`) - createCmd.Flags().IntVar(&createReq.Position, "position", createReq.Position, `The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) + cmd.Flags().BoolVar(&createReq.Enabled, "enabled", createReq.Enabled, `Specifies whether the script is enabled.`) + cmd.Flags().IntVar(&createReq.Position, "position", createReq.Position, `The position of a global init script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME SCRIPT", - Short: `Create init script.`, - Long: `Create init script. 
+ cmd.Use = "create NAME SCRIPT" + cmd.Short = `Create init script.` + cmd.Long = `Create init script. - Creates a new global init script in this workspace.`, + Creates a new global init script in this workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,51 +98,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteGlobalInitScriptRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *compute.DeleteGlobalInitScriptRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeleteGlobalInitScriptRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete SCRIPT_ID", - Short: `Delete init script.`, - Long: `Delete init script. + cmd.Use = "delete SCRIPT_ID" + cmd.Short = `Delete init script.` + cmd.Long = `Delete init script. - Deletes a global init script.`, + Deletes a global init script.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No SCRIPT_ID argument specified. Loading names for Global Init Scripts drop-down." - names, err := w.GlobalInitScripts.GlobalInitScriptDetailsNameToScriptIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Global Init Scripts drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the global init script") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the global init script") - } deleteReq.ScriptId = args[0] err = w.GlobalInitScripts.Delete(ctx, deleteReq) @@ -130,51 +159,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq compute.GetGlobalInitScriptRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetGlobalInitScriptRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetGlobalInitScriptRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get SCRIPT_ID", - Short: `Get an init script.`, - Long: `Get an init script. + cmd.Use = "get SCRIPT_ID" + cmd.Short = `Get an init script.` + cmd.Long = `Get an init script. - Gets all the details of a script, including its Base64-encoded contents.`, + Gets all the details of a script, including its Base64-encoded contents.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No SCRIPT_ID argument specified. Loading names for Global Init Scripts drop-down." 
- names, err := w.GlobalInitScripts.GlobalInitScriptDetailsNameToScriptIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Global Init Scripts drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the global init script") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the global init script") - } getReq.ScriptId = args[0] response, err := w.GlobalInitScripts.Get(ctx, getReq) @@ -182,32 +220,50 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get init scripts.`, - Long: `Get init scripts. + cmd.Use = "list" + cmd.Short = `Get init scripts.` + cmd.Long = `Get init scripts. Get a list of all global init scripts for this workspace. This returns all properties for each script but **not** the script contents. 
To retrieve the contents of a script, use the [get a global init - script](#operation/get-script) operation.`, + script](#operation/get-script) operation.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.GlobalInitScripts.ListAll(ctx) @@ -215,39 +271,61 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq compute.GlobalInitScriptUpdateRequest -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *compute.GlobalInitScriptUpdateRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq compute.GlobalInitScriptUpdateRequest + // TODO: short flags - updateCmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether the script is enabled.`) - updateCmd.Flags().IntVar(&updateReq.Position, "position", updateReq.Position, `The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) + cmd.Flags().BoolVar(&updateReq.Enabled, "enabled", updateReq.Enabled, `Specifies whether the script is enabled.`) + cmd.Flags().IntVar(&updateReq.Position, "position", updateReq.Position, `The position of a script, where 0 represents the first script to run, 1 is the second script to run, in ascending order.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME SCRIPT SCRIPT_ID", - Short: `Update init script.`, - Long: `Update init script. + cmd.Use = "update NAME SCRIPT SCRIPT_ID" + cmd.Short = `Update init script.` + cmd.Long = `Update init script. Updates a global init script, specifying only the fields to change. All fields - are optional. Unspecified fields retain their current value.`, + are optional. Unspecified fields retain their current value.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -260,10 +338,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service GlobalInitScripts diff --git a/cmd/workspace/grants/grants.go b/cmd/workspace/grants/grants.go index 1cc973872..a5ebd7330 100755 --- a/cmd/workspace/grants/grants.go +++ b/cmd/workspace/grants/grants.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "grants", - Short: `In Unity Catalog, data is secure by default.`, - Long: `In Unity Catalog, data is secure by default. Initially, users have no access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "grants", + Short: `In Unity Catalog, data is secure by default.`, + Long: `In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore. Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are @@ -26,36 +31,53 @@ var Cmd = &cobra.Command{ automatically grants the privilege to all current and future objects within the catalog. Similarly, privileges granted on a schema are inherited by all current and future objects within that schema.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq catalog.GetGrantRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetGrantRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetGrantRequest + // TODO: short flags - getCmd.Flags().StringVar(&getReq.Principal, "principal", getReq.Principal, `If provided, only the permissions for the specified principal (user or group) are returned.`) + cmd.Flags().StringVar(&getReq.Principal, "principal", getReq.Principal, `If provided, only the permissions for the specified principal (user or group) are returned.`) -} - -var getCmd = &cobra.Command{ - Use: "get SECURABLE_TYPE FULL_NAME", - Short: `Get permissions.`, - Long: `Get permissions. + cmd.Use = "get SECURABLE_TYPE FULL_NAME" + cmd.Short = `Get permissions.` + cmd.Long = `Get permissions. - Gets the permissions for a securable.`, + Gets the permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,37 +92,59 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-effective command -var getEffectiveReq catalog.GetEffectiveRequest -func init() { - Cmd.AddCommand(getEffectiveCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getEffectiveOverrides []func( + *cobra.Command, + *catalog.GetEffectiveRequest, +) + +func newGetEffective() *cobra.Command { + cmd := &cobra.Command{} + + var getEffectiveReq catalog.GetEffectiveRequest + // TODO: short flags - getEffectiveCmd.Flags().StringVar(&getEffectiveReq.Principal, "principal", getEffectiveReq.Principal, `If provided, only the effective permissions for the specified principal (user or group) are returned.`) + cmd.Flags().StringVar(&getEffectiveReq.Principal, "principal", getEffectiveReq.Principal, `If provided, only the effective permissions for the specified principal (user or group) are returned.`) -} - -var getEffectiveCmd = &cobra.Command{ - Use: "get-effective SECURABLE_TYPE FULL_NAME", - Short: `Get effective permissions.`, - Long: `Get effective permissions. + cmd.Use = "get-effective SECURABLE_TYPE FULL_NAME" + cmd.Short = `Get effective permissions.` + cmd.Long = `Get effective permissions. 
- Gets the effective permissions for a securable.`, + Gets the effective permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -115,39 +159,61 @@ var getEffectiveCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getEffectiveOverrides { + fn(cmd, &getEffectiveReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetEffective()) + }) } // start update command -var updateReq catalog.UpdatePermissions -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdatePermissions, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdatePermissions + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: changes -} - -var updateCmd = &cobra.Command{ - Use: "update SECURABLE_TYPE FULL_NAME", - Short: `Update permissions.`, - Long: `Update permissions. + cmd.Use = "update SECURABLE_TYPE FULL_NAME" + cmd.Short = `Update permissions.` + cmd.Long = `Update permissions. - Updates the permissions for a securable.`, + Updates the permissions for a securable.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -168,10 +234,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Grants diff --git a/cmd/workspace/groups.go b/cmd/workspace/groups.go index 92b9ae946..43159d18b 100644 --- a/cmd/workspace/groups.go +++ b/cmd/workspace/groups.go @@ -1,7 +1,6 @@ package workspace import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) @@ -55,11 +54,3 @@ func Groups() []cobra.Group { }, } } - -func init() { - // Register groups with parent command - groups := Groups() - for i := range groups { - root.RootCmd.AddGroup(&groups[i]) - } -} diff --git a/cmd/workspace/groups/groups.go b/cmd/workspace/groups/groups.go index 39a95aada..0ef9a2696 100755 --- a/cmd/workspace/groups/groups.go +++ b/cmd/workspace/groups/groups.go @@ -3,8 +3,6 @@ package groups import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,59 +10,81 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "groups", - Short: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.`, - Long: `Groups simplify identity management, making it easier to assign access to +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "groups", + Short: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects.`, + Long: `Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects. 
It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups, instead of to users individually. All Databricks workspace identities can be assigned as members of groups, and members inherit permissions that are assigned to their group.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.Group -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.Group + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: 
meta // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new group.`, - Long: `Create a new group. + cmd.Use = "create" + cmd.Short = `Create a new group.` + cmd.Long = `Create a new group. Creates a group in the Databricks workspace with a unique name, using the - supplied group details.`, + supplied group details.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,51 +101,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteGroupRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *iam.DeleteGroupRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteGroupRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a group.`, - Long: `Delete a group. + cmd.Use = "delete ID" + cmd.Short = `Delete a group.` + cmd.Long = `Delete a group. - Deletes a group from the Databricks workspace.`, + Deletes a group from the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." - names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks workspace") - } deleteReq.Id = args[0] err = w.Groups.Delete(ctx, deleteReq) @@ -133,51 +162,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetGroupRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetGroupRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetGroupRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get group details.`, - Long: `Get group details. + cmd.Use = "get ID" + cmd.Short = `Get group details.` + cmd.Long = `Get group details. - Gets the information for a specific group in the Databricks workspace.`, + Gets the information for a specific group in the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." - names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks workspace") - } getReq.Id = args[0] response, err := w.Groups.Get(ctx, getReq) @@ -185,48 +223,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListGroupsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListGroupsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListGroupsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort 
the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List group details.`, - Long: `List group details. + cmd.Use = "list" + cmd.Short = `List group details.` + cmd.Long = `List group details. - Gets all details of the groups associated with the Databricks workspace.`, + Gets all details of the groups associated with the Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -243,36 +303,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update group details.`, - Long: `Update group details. + cmd.Use = "patch ID" + cmd.Short = `Update group details.` + cmd.Long = `Update group details. - Partially updates the details of a group.`, + Partially updates the details of a group.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -282,23 +368,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." - names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a group in the databricks workspace") - } patchReq.Id = args[0] err = w.Groups.Patch(ctx, patchReq) @@ -306,42 +375,71 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.Group -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.Group, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.Group + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a human-readable group name.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks group ID.`) // TODO: array: members // TODO: complex arg: meta // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a group.`, - Long: `Replace a group. + cmd.Use = "update ID" + cmd.Short = `Replace a group.` + cmd.Long = `Replace a group. 
- Updates the details of a group by replacing the entire group entity.`, + Updates the details of a group by replacing the entire group entity.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -351,23 +449,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." - names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks group ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks group id") - } updateReq.Id = args[0] } @@ -376,10 +457,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Groups diff --git a/cmd/workspace/groups/overrides.go b/cmd/workspace/groups/overrides.go index 28c91c4d2..db9c7610c 100644 --- a/cmd/workspace/groups/overrides.go +++ b/cmd/workspace/groups/overrides.go @@ -1,10 +1,18 @@ package groups -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListGroupsRequest) { listReq.Attributes = "id,displayName" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.DisplayName}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/instance-pools/instance-pools.go b/cmd/workspace/instance-pools/instance-pools.go index 80c091e2a..e1e3cd21d 100755 --- a/cmd/workspace/instance-pools/instance-pools.go +++ b/cmd/workspace/instance-pools/instance-pools.go @@ -3,8 +3,6 @@ package instance_pools import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "instance-pools", - Short: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.`, - Long: `Instance Pools API are used to create, edit, delete and list instance pools by +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "instance-pools", + Short: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times.`, + Long: `Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times. @@ -33,52 +36,69 @@ var Cmd = &cobra.Command{ Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does apply. See pricing.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq compute.CreateInstancePool -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *compute.CreateInstancePool, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq compute.CreateInstancePool + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: map via StringToStringVar: custom_tags // TODO: complex arg: disk_spec - createCmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes - createCmd.Flags().IntVar(&createReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", createReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) + cmd.Flags().IntVar(&createReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", createReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) // TODO: complex arg: instance_pool_fleet_attributes - createCmd.Flags().IntVar(&createReq.MaxCapacity, "max-capacity", 
createReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) - createCmd.Flags().IntVar(&createReq.MinIdleInstances, "min-idle-instances", createReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) + cmd.Flags().IntVar(&createReq.MaxCapacity, "max-capacity", createReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) + cmd.Flags().IntVar(&createReq.MinIdleInstances, "min-idle-instances", createReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images // TODO: array: preloaded_spark_versions -} - -var createCmd = &cobra.Command{ - Use: "create INSTANCE_POOL_NAME NODE_TYPE_ID", - Short: `Create a new instance pool.`, - Long: `Create a new instance pool. + cmd.Use = "create INSTANCE_POOL_NAME NODE_TYPE_ID" + cmd.Short = `Create a new instance pool.` + cmd.Long = `Create a new instance pool. - Creates a new instance pool using idle and ready-to-use cloud instances.`, + Creates a new instance pool using idle and ready-to-use cloud instances.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -97,34 +117,63 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq compute.DeleteInstancePool -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *compute.DeleteInstancePool, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq compute.DeleteInstancePool + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete INSTANCE_POOL_ID", - Short: `Delete an instance pool.`, - Long: `Delete an instance pool. + cmd.Use = "delete INSTANCE_POOL_ID" + cmd.Short = `Delete an instance pool.` + cmd.Long = `Delete an instance pool. Deletes the instance pool permanently. 
The idle instances in the pool are - terminated asynchronously.`, + terminated asynchronously.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -134,23 +183,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." - names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The instance pool to be terminated") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the instance pool to be terminated") - } deleteReq.InstancePoolId = args[0] } @@ -159,53 +191,75 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq compute.EditInstancePool -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var editOverrides []func( + *cobra.Command, + *compute.EditInstancePool, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.EditInstancePool + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_attributes // TODO: complex arg: azure_attributes // TODO: map via StringToStringVar: custom_tags // TODO: complex arg: disk_spec - editCmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) + cmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes - editCmd.Flags().IntVar(&editReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", editReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already 
met.`) + cmd.Flags().IntVar(&editReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", editReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) // TODO: complex arg: instance_pool_fleet_attributes - editCmd.Flags().IntVar(&editReq.MaxCapacity, "max-capacity", editReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) - editCmd.Flags().IntVar(&editReq.MinIdleInstances, "min-idle-instances", editReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) + cmd.Flags().IntVar(&editReq.MaxCapacity, "max-capacity", editReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) + cmd.Flags().IntVar(&editReq.MinIdleInstances, "min-idle-instances", editReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images // TODO: array: preloaded_spark_versions -} - -var editCmd = &cobra.Command{ - Use: "edit INSTANCE_POOL_ID INSTANCE_POOL_NAME NODE_TYPE_ID", - Short: `Edit an existing instance pool.`, - Long: `Edit an existing instance pool. + cmd.Use = "edit INSTANCE_POOL_ID INSTANCE_POOL_NAME NODE_TYPE_ID" + cmd.Short = `Edit an existing instance pool.` + cmd.Long = `Edit an existing instance pool. 
- Modifies the configuration of an existing instance pool.`, + Modifies the configuration of an existing instance pool.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -225,51 +279,60 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq compute.GetInstancePoolRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetInstancePoolRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetInstancePoolRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get INSTANCE_POOL_ID", - Short: `Get instance pool information.`, - Long: `Get instance pool information. 
+ cmd.Use = "get INSTANCE_POOL_ID" + cmd.Short = `Get instance pool information.` + cmd.Long = `Get instance pool information. - Retrieve the information for an instance pool based on its identifier.`, + Retrieve the information for an instance pool based on its identifier.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." - names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical unique identifier for the instance pool") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical unique identifier for the instance pool") - } getReq.InstancePoolId = args[0] response, err := w.InstancePools.Get(ctx, getReq) @@ -277,29 +340,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List instance pool info.`, - Long: `List instance pool info. + cmd.Use = "list" + cmd.Short = `List instance pool info.` + cmd.Long = `List instance pool info. - Gets a list of instance pools with their statistics.`, + Gets a list of instance pools with their statistics.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.InstancePools.ListAll(ctx) @@ -307,10 +388,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service InstancePools diff --git a/cmd/workspace/instance-pools/overrides.go b/cmd/workspace/instance-pools/overrides.go index 11a76bdd5..f62f8c536 100644 --- a/cmd/workspace/instance-pools/overrides.go +++ b/cmd/workspace/instance-pools/overrides.go @@ -1,9 +1,16 @@ package instance_pools -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.InstancePoolId|green}} {{.InstancePoolName}} {{.NodeTypeId}} {{.State}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/instance-profiles/instance-profiles.go b/cmd/workspace/instance-profiles/instance-profiles.go index 17eea267c..0922a5ae3 100755 --- a/cmd/workspace/instance-profiles/instance-profiles.go +++ b/cmd/workspace/instance-profiles/instance-profiles.go @@ -10,53 +10,75 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "instance-profiles", - Short: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.`, - Long: `The Instance Profiles API allows admins to add, list, and remove instance +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "instance-profiles", + Short: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with.`, + Long: `The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3 buckets] using instance profiles for more information. [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start add command -var addReq compute.AddInstanceProfile -var addJson flags.JsonFlag -func init() { - Cmd.AddCommand(addCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var addOverrides []func( + *cobra.Command, + *compute.AddInstanceProfile, +) + +func newAdd() *cobra.Command { + cmd := &cobra.Command{} + + var addReq compute.AddInstanceProfile + var addJson flags.JsonFlag + // TODO: short flags - addCmd.Flags().Var(&addJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&addJson, "json", `either inline JSON string or @path/to/file.json with request body`) - addCmd.Flags().StringVar(&addReq.IamRoleArn, "iam-role-arn", addReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - addCmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) - addCmd.Flags().BoolVar(&addReq.SkipValidation, "skip-validation", addReq.SkipValidation, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) + cmd.Flags().StringVar(&addReq.IamRoleArn, "iam-role-arn", addReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) + cmd.Flags().BoolVar(&addReq.IsMetaInstanceProfile, "is-meta-instance-profile", addReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) + cmd.Flags().BoolVar(&addReq.SkipValidation, "skip-validation", addReq.SkipValidation, `By default, Databricks validates that it has sufficient permissions to launch instances with the instance profile.`) -} - -var addCmd = &cobra.Command{ - Use: "add INSTANCE_PROFILE_ARN", - Short: `Register an instance profile.`, - Long: `Register an instance profile. + cmd.Use = "add INSTANCE_PROFILE_ARN" + cmd.Short = `Register an instance profile.` + cmd.Long = `Register an instance profile. In the UI, you can select the instance profile when launching clusters. 
This - API is only available to admin users.`, + API is only available to admin users.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,30 +96,50 @@ var addCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range addOverrides { + fn(cmd, &addReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAdd()) + }) } // start edit command -var editReq compute.InstanceProfile -var editJson flags.JsonFlag -func init() { - Cmd.AddCommand(editCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var editOverrides []func( + *cobra.Command, + *compute.InstanceProfile, +) + +func newEdit() *cobra.Command { + cmd := &cobra.Command{} + + var editReq compute.InstanceProfile + var editJson flags.JsonFlag + // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().StringVar(&editReq.IamRoleArn, "iam-role-arn", editReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) - editCmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) + cmd.Flags().StringVar(&editReq.IamRoleArn, "iam-role-arn", editReq.IamRoleArn, `The AWS IAM role ARN of the role associated with the instance profile.`) + cmd.Flags().BoolVar(&editReq.IsMetaInstanceProfile, "is-meta-instance-profile", editReq.IsMetaInstanceProfile, `Boolean flag indicating whether the instance profile should only be used in credential passthrough scenarios.`) -} - -var editCmd = &cobra.Command{ - Use: "edit INSTANCE_PROFILE_ARN", - Short: `Edit an instance profile.`, - Long: `Edit an instance profile. + cmd.Use = "edit INSTANCE_PROFILE_ARN" + cmd.Short = `Edit an instance profile.` + cmd.Long = `Edit an instance profile. The only supported field to change is the optional IAM role ARN associated with the instance profile. It is required to specify the IAM role ARN if both @@ -113,18 +155,20 @@ var editCmd = &cobra.Command{ This API is only available to admin users. 
[Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html - [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html`, + [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -142,31 +186,49 @@ var editCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List available instance profiles.`, - Long: `List available instance profiles. + cmd.Use = "list" + cmd.Short = `List available instance profiles.` + cmd.Long = `List available instance profiles. 
List the instance profiles that the calling user can use to launch a cluster. - This API is available to all users.`, + This API is available to all users.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.InstanceProfiles.ListAll(ctx) @@ -174,43 +236,65 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start remove command -var removeReq compute.RemoveInstanceProfile -var removeJson flags.JsonFlag -func init() { - Cmd.AddCommand(removeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var removeOverrides []func( + *cobra.Command, + *compute.RemoveInstanceProfile, +) + +func newRemove() *cobra.Command { + cmd := &cobra.Command{} + + var removeReq compute.RemoveInstanceProfile + var removeJson flags.JsonFlag + // TODO: short flags - removeCmd.Flags().Var(&removeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&removeJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var removeCmd = &cobra.Command{ - Use: "remove INSTANCE_PROFILE_ARN", - Short: `Remove the instance profile.`, - Long: `Remove the instance profile. + cmd.Use = "remove INSTANCE_PROFILE_ARN" + cmd.Short = `Remove the instance profile.` + cmd.Long = `Remove the instance profile. Remove the instance profile with the provided ARN. Existing clusters with this instance profile will continue to function. - This API is only accessible to admin users.`, + This API is only accessible to admin users.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -228,10 +312,24 @@ var removeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range removeOverrides { + fn(cmd, &removeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRemove()) + }) } // end service InstanceProfiles diff --git a/cmd/workspace/instance-profiles/overrides.go b/cmd/workspace/instance-profiles/overrides.go index 3b5cbd1c6..adf060528 100644 --- a/cmd/workspace/instance-profiles/overrides.go +++ b/cmd/workspace/instance-profiles/overrides.go @@ -1,9 +1,16 @@ package instance_profiles -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.InstanceProfileArn}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/ip-access-lists/ip-access-lists.go b/cmd/workspace/ip-access-lists/ip-access-lists.go index 94bd110d0..081cb385c 100755 --- a/cmd/workspace/ip-access-lists/ip-access-lists.go +++ b/cmd/workspace/ip-access-lists/ip-access-lists.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "ip-access-lists", - Short: `IP Access List enables admins to configure IP access lists.`, - Long: `IP Access List enables admins to configure IP access lists. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "ip-access-lists", + Short: `IP Access List enables admins to configure IP access lists.`, + Long: `IP Access List enables admins to configure IP access lists. IP access lists affect web application access and REST API access to this workspace only. 
If the feature is disabled for a workspace, all access is @@ -36,26 +41,41 @@ var Cmd = &cobra.Command{ After changes to the IP access list feature, it can take a few minutes for changes to take effect.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateIpAccessList -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *settings.CreateIpAccessList, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateIpAccessList + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create access list.`, - Long: `Create access list. + cmd.Use = "create" + cmd.Short = `Create access list.` + cmd.Long = `Create access list. Creates an IP access list for this workspace. @@ -72,11 +92,12 @@ var createCmd = &cobra.Command{ It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no effect until you enable the feature. 
See - :method:workspaceconf/setStatus`, + :method:workspaceconf/setStatus` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -94,51 +115,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.DeleteIpAccessListRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteIpAccessListRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteIpAccessListRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete IP_ACCESS_LIST_ID", - Short: `Delete access list.`, - Long: `Delete access list. + cmd.Use = "delete IP_ACCESS_LIST_ID" + cmd.Short = `Delete access list.` + cmd.Long = `Delete access list. 
- Deletes an IP access list, specified by its list ID.`, + Deletes an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Ip Access Lists drop-down." - names, err := w.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Ip Access Lists drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list to modify") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding ip access list to modify") - } deleteReq.IpAccessListId = args[0] err = w.IpAccessLists.Delete(ctx, deleteReq) @@ -146,51 +176,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetIpAccessListRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetIpAccessListRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetIpAccessListRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get IP_ACCESS_LIST_ID", - Short: `Get access list.`, - Long: `Get access list. + cmd.Use = "get IP_ACCESS_LIST_ID" + cmd.Short = `Get access list.` + cmd.Long = `Get access list. - Gets an IP access list, specified by its list ID.`, + Gets an IP access list, specified by its list ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Ip Access Lists drop-down." - names, err := w.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list to modify") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding ip access list to modify") - } getReq.IpAccessListId = args[0] response, err := w.IpAccessLists.Get(ctx, getReq) @@ -198,29 +237,47 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get access lists.`, - Long: `Get access lists. + cmd.Use = "list" + cmd.Short = `Get access lists.` + cmd.Long = `Get access lists. 
- Gets all IP access lists for the specified workspace.`, + Gets all IP access lists for the specified workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.IpAccessLists.ListAll(ctx) @@ -228,29 +285,49 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start replace command -var replaceReq settings.ReplaceIpAccessList -var replaceJson flags.JsonFlag -func init() { - Cmd.AddCommand(replaceCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var replaceOverrides []func( + *cobra.Command, + *settings.ReplaceIpAccessList, +) + +func newReplace() *cobra.Command { + cmd := &cobra.Command{} + + var replaceReq settings.ReplaceIpAccessList + var replaceJson flags.JsonFlag + // TODO: short flags - replaceCmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&replaceJson, "json", `either inline JSON string or @path/to/file.json with request body`) - replaceCmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&replaceReq.ListId, "list-id", replaceReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var replaceCmd = &cobra.Command{ - Use: "replace", - Short: `Replace access list.`, - Long: `Replace access list. + cmd.Use = "replace" + cmd.Short = `Replace access list.` + cmd.Long = `Replace access list. Replaces an IP access list, specified by its ID. @@ -263,11 +340,12 @@ var replaceCmd = &cobra.Command{ calling user's current IP, error 400 is returned with error_code value INVALID_STATE. It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the - feature. See :method:workspaceconf/setStatus.`, + feature. See :method:workspaceconf/setStatus.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -285,29 +363,49 @@ var replaceCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range replaceOverrides { + fn(cmd, &replaceReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReplace()) + }) } // start update command -var updateReq settings.UpdateIpAccessList -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *settings.UpdateIpAccessList, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq settings.UpdateIpAccessList + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) + cmd.Flags().StringVar(&updateReq.ListId, "list-id", updateReq.ListId, `Universally unique identifier (UUID) of the IP access list.`) -} - -var updateCmd = &cobra.Command{ - Use: "update", - Short: `Update access list.`, - Long: `Update access list. + cmd.Use = "update" + cmd.Short = `Update access list.` + cmd.Long = `Update access list. Updates an existing IP access list, specified by its ID. @@ -324,11 +422,12 @@ var updateCmd = &cobra.Command{ It can take a few minutes for the changes to take effect. Note that your resulting IP access list has no effect until you enable the feature. 
See - :method:workspaceconf/setStatus.`, + :method:workspaceconf/setStatus.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -346,10 +445,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service IpAccessLists diff --git a/cmd/workspace/ip-access-lists/overrides.go b/cmd/workspace/ip-access-lists/overrides.go index abea3032f..ab4db1ec6 100644 --- a/cmd/workspace/ip-access-lists/overrides.go +++ b/cmd/workspace/ip-access-lists/overrides.go @@ -1,10 +1,17 @@ package ip_access_lists -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { // this command still has no Web UI listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.ListId|green}} {{.Label}} {{join .IpAddresses ","}} {{if eq .ListType "ALLOW"}}{{"ALLOW"|green}}{{else}}{{"BLOCK"|red}}{{end}} {{if .Enabled}}{{"ENABLED"|green}}{{else}}{{"DISABLED"|red}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index 41101bdbf..49d7edbd1 100755 --- a/cmd/workspace/jobs/jobs.go +++ 
b/cmd/workspace/jobs/jobs.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "jobs", - Short: `The Jobs API allows you to create, edit, and delete jobs.`, - Long: `The Jobs API allows you to create, edit, and delete jobs. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "jobs", + Short: `The Jobs API allows you to create, edit, and delete jobs.`, + Long: `The Jobs API allows you to create, edit, and delete jobs. You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with scalable resources. Your job can consist of a single @@ -34,33 +39,57 @@ var Cmd = &cobra.Command{ [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets`, - Annotations: map[string]string{ - "package": "jobs", - }, + GroupID: "jobs", + Annotations: map[string]string{ + "package": "jobs", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start cancel-all-runs command -var cancelAllRunsReq jobs.CancelAllRuns -var cancelAllRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(cancelAllRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cancelAllRunsOverrides []func( + *cobra.Command, + *jobs.CancelAllRuns, +) + +func newCancelAllRuns() *cobra.Command { + cmd := &cobra.Command{} + + var cancelAllRunsReq jobs.CancelAllRuns + var cancelAllRunsJson flags.JsonFlag + // TODO: short flags - cancelAllRunsCmd.Flags().Var(&cancelAllRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&cancelAllRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var cancelAllRunsCmd = &cobra.Command{ - Use: "cancel-all-runs JOB_ID", - Short: `Cancel all runs of a job.`, - Long: `Cancel all runs of a job. + cmd.Use = "cancel-all-runs JOB_ID" + cmd.Short = `Cancel all runs of a job.` + cmd.Long = `Cancel all runs of a job. Cancels all active runs of a job. The runs are canceled asynchronously, so it - doesn't prevent new runs from being started.`, + doesn't prevent new runs from being started.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,23 +99,6 @@ var cancelAllRunsCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to cancel all runs of") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the job to cancel all runs of") - } _, err = fmt.Sscan(args[0], &cancelAllRunsReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -98,40 +110,68 @@ var cancelAllRunsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range cancelAllRunsOverrides { + fn(cmd, &cancelAllRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCancelAllRuns()) + }) } // start cancel-run command -var cancelRunReq jobs.CancelRun -var cancelRunJson flags.JsonFlag -var cancelRunSkipWait bool -var cancelRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cancelRunOverrides []func( + *cobra.Command, + *jobs.CancelRun, +) -func init() { - Cmd.AddCommand(cancelRunCmd) +func newCancelRun() *cobra.Command { + cmd := &cobra.Command{} - cancelRunCmd.Flags().BoolVar(&cancelRunSkipWait, "no-wait", cancelRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - cancelRunCmd.Flags().DurationVar(&cancelRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var cancelRunReq jobs.CancelRun + var cancelRunJson flags.JsonFlag + + var cancelRunSkipWait bool + var cancelRunTimeout time.Duration + + cmd.Flags().BoolVar(&cancelRunSkipWait, "no-wait", cancelRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&cancelRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - cancelRunCmd.Flags().Var(&cancelRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&cancelRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var cancelRunCmd = &cobra.Command{ - Use: "cancel-run RUN_ID", - Short: `Cancel a job run.`, - Long: `Cancel a job run. + cmd.Use = "cancel-run RUN_ID" + cmd.Short = `Cancel a job run.` + cmd.Long = `Cancel a job run. Cancels a job run. 
The run is canceled asynchronously, so it may still be - running when this request completes.`, + running when this request completes.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -141,23 +181,6 @@ var cancelRunCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "This field is required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have this field is required") - } _, err = fmt.Sscan(args[0], &cancelRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -188,40 +211,62 @@ var cancelRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range cancelRunOverrides { + fn(cmd, &cancelRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCancelRun()) + }) } // start create command -var createReq jobs.CreateJob -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *jobs.CreateJob, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq jobs.CreateJob + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new job.`, - Long: `Create a new job. + cmd.Use = "create" + cmd.Short = `Create a new job.` + cmd.Long = `Create a new job. - Create a new job.`, + Create a new job.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -239,33 +284,62 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq jobs.DeleteJob -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *jobs.DeleteJob, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq jobs.DeleteJob + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete JOB_ID", - Short: `Delete a job.`, - Long: `Delete a job. + cmd.Use = "delete JOB_ID" + cmd.Short = `Delete a job.` + cmd.Long = `Delete a job. 
- Deletes a job.`, + Deletes a job.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -275,23 +349,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to delete") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the job to delete") - } _, err = fmt.Sscan(args[0], &deleteReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -303,33 +360,62 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start delete-run command -var deleteRunReq jobs.DeleteRun -var deleteRunJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteRunOverrides []func( + *cobra.Command, + *jobs.DeleteRun, +) + +func newDeleteRun() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunReq jobs.DeleteRun + var deleteRunJson flags.JsonFlag + // TODO: short flags - deleteRunCmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteRunCmd = &cobra.Command{ - Use: "delete-run RUN_ID", - Short: `Delete a job run.`, - Long: `Delete a job run. + cmd.Use = "delete-run RUN_ID" + cmd.Short = `Delete a job run.` + cmd.Long = `Delete a job run. - Deletes a non-active run. Returns an error if the run is active.`, + Deletes a non-active run. 
Returns an error if the run is active.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -339,23 +425,6 @@ var deleteRunCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the run for which to retrieve the metadata") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the run for which to retrieve the metadata") - } _, err = fmt.Sscan(args[0], &deleteRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -367,53 +436,62 @@ var deleteRunCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteRunOverrides { + fn(cmd, &deleteRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRun()) + }) } // start export-run command -var exportRunReq jobs.ExportRunRequest -func init() { - Cmd.AddCommand(exportRunCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var exportRunOverrides []func( + *cobra.Command, + *jobs.ExportRunRequest, +) + +func newExportRun() *cobra.Command { + cmd := &cobra.Command{} + + var exportRunReq jobs.ExportRunRequest + // TODO: short flags - exportRunCmd.Flags().Var(&exportRunReq.ViewsToExport, "views-to-export", `Which views to export (CODE, DASHBOARDS, or ALL).`) + cmd.Flags().Var(&exportRunReq.ViewsToExport, "views-to-export", `Which views to export (CODE, DASHBOARDS, or ALL).`) -} - -var exportRunCmd = &cobra.Command{ - Use: "export-run RUN_ID", - Short: `Export and retrieve a job run.`, - Long: `Export and retrieve a job run. + cmd.Use = "export-run RUN_ID" + cmd.Short = `Export and retrieve a job run.` + cmd.Long = `Export and retrieve a job run. - Export and retrieve the job run task.`, + Export and retrieve the job run task.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." 
- names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier for the run") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier for the run") - } _, err = fmt.Sscan(args[0], &exportRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -424,51 +502,60 @@ var exportRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range exportRunOverrides { + fn(cmd, &exportRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportRun()) + }) } // start get command -var getReq jobs.GetJobRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *jobs.GetJobRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq jobs.GetJobRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get JOB_ID", - Short: `Get a single job.`, - Long: `Get a single job. + cmd.Use = "get JOB_ID" + cmd.Short = `Get a single job.` + cmd.Long = `Get a single job. 
- Retrieves the details for a single job.`, + Retrieves the details for a single job.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to retrieve information about") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the job to retrieve information about") - } _, err = fmt.Sscan(args[0], &getReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -479,59 +566,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-run command -var getRunReq jobs.GetRunRequest -var getRunSkipWait bool -var getRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRunOverrides []func( + *cobra.Command, + *jobs.GetRunRequest, +) -func init() { - Cmd.AddCommand(getRunCmd) +func newGetRun() *cobra.Command { + cmd := &cobra.Command{} - getRunCmd.Flags().BoolVar(&getRunSkipWait, "no-wait", getRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - getRunCmd.Flags().DurationVar(&getRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var getRunReq jobs.GetRunRequest + + var getRunSkipWait bool + var getRunTimeout time.Duration + + cmd.Flags().BoolVar(&getRunSkipWait, "no-wait", getRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&getRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - getRunCmd.Flags().BoolVar(&getRunReq.IncludeHistory, "include-history", getRunReq.IncludeHistory, `Whether to include the repair history in the response.`) + cmd.Flags().BoolVar(&getRunReq.IncludeHistory, "include-history", getRunReq.IncludeHistory, `Whether to include the repair history in the response.`) -} - -var getRunCmd = &cobra.Command{ - Use: "get-run RUN_ID", - Short: `Get a single job run.`, - Long: `Get a single job run. + cmd.Use = "get-run RUN_ID" + cmd.Short = `Get a single job run.` + cmd.Long = `Get a single job run. 
- Retrieve the metadata of a run.`, + Retrieve the metadata of a run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the run for which to retrieve the metadata") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the run for which to retrieve the metadata") - } _, err = fmt.Sscan(args[0], &getRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -542,25 +637,45 @@ var getRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getRunOverrides { + fn(cmd, &getRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRun()) + }) } // start get-run-output command -var getRunOutputReq jobs.GetRunOutputRequest -func init() { - Cmd.AddCommand(getRunOutputCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getRunOutputOverrides []func( + *cobra.Command, + *jobs.GetRunOutputRequest, +) + +func newGetRunOutput() *cobra.Command { + cmd := &cobra.Command{} + + var getRunOutputReq jobs.GetRunOutputRequest + // TODO: short flags -} - -var getRunOutputCmd = &cobra.Command{ - Use: "get-run-output RUN_ID", - Short: `Get the output for a single run.`, - Long: `Get the output for a single run. + cmd.Use = "get-run-output RUN_ID" + cmd.Short = `Get the output for a single run.` + cmd.Long = `Get the output for a single run. Retrieve the output and metadata of a single task run. When a notebook task returns a value through the dbutils.notebook.exit() call, you can use this @@ -571,31 +686,20 @@ var getRunOutputCmd = &cobra.Command{ This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. 
If you to want to reference them beyond - 60 days, you must save old run results before they expire.`, + 60 days, you must save old run results before they expire.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier for the run") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier for the run") - } _, err = fmt.Sscan(args[0], &getRunOutputReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -606,46 +710,68 @@ var getRunOutputCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getRunOutputOverrides { + fn(cmd, &getRunOutputReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetRunOutput()) + }) } // start list command -var listReq jobs.ListJobsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *jobs.ListJobsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq jobs.ListJobsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().BoolVar(&listReq.ExpandTasks, "expand-tasks", listReq.ExpandTasks, `Whether to include task and cluster details in the response.`) - listCmd.Flags().IntVar(&listReq.Limit, "limit", listReq.Limit, `The number of jobs to return.`) - listCmd.Flags().StringVar(&listReq.Name, "name", listReq.Name, `A filter on the list based on the exact (case insensitive) job name.`) - listCmd.Flags().IntVar(&listReq.Offset, "offset", listReq.Offset, `The offset of the first job to return, relative to the most recently created job.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of jobs respectively.`) + cmd.Flags().BoolVar(&listReq.ExpandTasks, "expand-tasks", listReq.ExpandTasks, `Whether to include task and cluster details in the response.`) + cmd.Flags().IntVar(&listReq.Limit, "limit", listReq.Limit, `The number of jobs to return.`) + cmd.Flags().StringVar(&listReq.Name, "name", listReq.Name, 
`A filter on the list based on the exact (case insensitive) job name.`) + cmd.Flags().IntVar(&listReq.Offset, "offset", listReq.Offset, `The offset of the first job to return, relative to the most recently created job.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of jobs respectively.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List jobs.`, - Long: `List jobs. + cmd.Use = "list" + cmd.Short = `List jobs.` + cmd.Long = `List jobs. - Retrieves a list of jobs.`, + Retrieves a list of jobs.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -662,51 +788,73 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-runs command -var listRunsReq jobs.ListRunsRequest -var listRunsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listRunsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listRunsOverrides []func( + *cobra.Command, + *jobs.ListRunsRequest, +) + +func newListRuns() *cobra.Command { + cmd := &cobra.Command{} + + var listRunsReq jobs.ListRunsRequest + var listRunsJson flags.JsonFlag + // TODO: short flags - listRunsCmd.Flags().Var(&listRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listRunsCmd.Flags().BoolVar(&listRunsReq.ActiveOnly, "active-only", listRunsReq.ActiveOnly, `If active_only is true, only active runs are included in the results; otherwise, lists both active and completed runs.`) - listRunsCmd.Flags().BoolVar(&listRunsReq.CompletedOnly, "completed-only", listRunsReq.CompletedOnly, `If completed_only is true, only completed runs are included in the results; otherwise, lists both active and completed runs.`) - listRunsCmd.Flags().BoolVar(&listRunsReq.ExpandTasks, "expand-tasks", listRunsReq.ExpandTasks, `Whether to include task and cluster details in the response.`) - listRunsCmd.Flags().Int64Var(&listRunsReq.JobId, "job-id", listRunsReq.JobId, `The job for which to list runs.`) - listRunsCmd.Flags().IntVar(&listRunsReq.Limit, "limit", listRunsReq.Limit, `The number of runs to return.`) - listRunsCmd.Flags().IntVar(&listRunsReq.Offset, "offset", listRunsReq.Offset, `The offset of the first run to return, relative to the most recent run.`) - 
listRunsCmd.Flags().StringVar(&listRunsReq.PageToken, "page-token", listRunsReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of runs respectively.`) - listRunsCmd.Flags().Var(&listRunsReq.RunType, "run-type", `The type of runs to return.`) - listRunsCmd.Flags().IntVar(&listRunsReq.StartTimeFrom, "start-time-from", listRunsReq.StartTimeFrom, `Show runs that started _at or after_ this value.`) - listRunsCmd.Flags().IntVar(&listRunsReq.StartTimeTo, "start-time-to", listRunsReq.StartTimeTo, `Show runs that started _at or before_ this value.`) + cmd.Flags().BoolVar(&listRunsReq.ActiveOnly, "active-only", listRunsReq.ActiveOnly, `If active_only is true, only active runs are included in the results; otherwise, lists both active and completed runs.`) + cmd.Flags().BoolVar(&listRunsReq.CompletedOnly, "completed-only", listRunsReq.CompletedOnly, `If completed_only is true, only completed runs are included in the results; otherwise, lists both active and completed runs.`) + cmd.Flags().BoolVar(&listRunsReq.ExpandTasks, "expand-tasks", listRunsReq.ExpandTasks, `Whether to include task and cluster details in the response.`) + cmd.Flags().Int64Var(&listRunsReq.JobId, "job-id", listRunsReq.JobId, `The job for which to list runs.`) + cmd.Flags().IntVar(&listRunsReq.Limit, "limit", listRunsReq.Limit, `The number of runs to return.`) + cmd.Flags().IntVar(&listRunsReq.Offset, "offset", listRunsReq.Offset, `The offset of the first run to return, relative to the most recent run.`) + cmd.Flags().StringVar(&listRunsReq.PageToken, "page-token", listRunsReq.PageToken, `Use next_page_token or prev_page_token returned from the previous request to list the next or previous page of runs respectively.`) + cmd.Flags().Var(&listRunsReq.RunType, "run-type", `The type of runs to return.`) + cmd.Flags().IntVar(&listRunsReq.StartTimeFrom, "start-time-from", listRunsReq.StartTimeFrom, `Show runs that started _at or after_ this 
value.`) + cmd.Flags().IntVar(&listRunsReq.StartTimeTo, "start-time-to", listRunsReq.StartTimeTo, `Show runs that started _at or before_ this value.`) -} - -var listRunsCmd = &cobra.Command{ - Use: "list-runs", - Short: `List job runs.`, - Long: `List job runs. + cmd.Use = "list-runs" + cmd.Short = `List job runs.` + cmd.Long = `List job runs. - List runs in descending order by start time.`, + List runs in descending order by start time.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -723,54 +871,82 @@ var listRunsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listRunsOverrides { + fn(cmd, &listRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListRuns()) + }) } // start repair-run command -var repairRunReq jobs.RepairRun -var repairRunJson flags.JsonFlag -var repairRunSkipWait bool -var repairRunTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var repairRunOverrides []func( + *cobra.Command, + *jobs.RepairRun, +) -func init() { - Cmd.AddCommand(repairRunCmd) +func newRepairRun() *cobra.Command { + cmd := &cobra.Command{} - repairRunCmd.Flags().BoolVar(&repairRunSkipWait, "no-wait", repairRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - repairRunCmd.Flags().DurationVar(&repairRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var repairRunReq jobs.RepairRun + var repairRunJson flags.JsonFlag + + var repairRunSkipWait bool + var repairRunTimeout time.Duration + + cmd.Flags().BoolVar(&repairRunSkipWait, "no-wait", repairRunSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&repairRunTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - repairRunCmd.Flags().Var(&repairRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&repairRunJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: dbt_commands // TODO: array: jar_params - repairRunCmd.Flags().Int64Var(&repairRunReq.LatestRepairId, "latest-repair-id", repairRunReq.LatestRepairId, `The ID of the latest repair.`) + cmd.Flags().Int64Var(&repairRunReq.LatestRepairId, "latest-repair-id", repairRunReq.LatestRepairId, `The ID of the latest repair.`) // TODO: map via StringToStringVar: notebook_params // TODO: complex arg: pipeline_params // TODO: map via StringToStringVar: python_named_params // TODO: array: python_params - repairRunCmd.Flags().BoolVar(&repairRunReq.RerunAllFailedTasks, "rerun-all-failed-tasks", repairRunReq.RerunAllFailedTasks, `If true, repair all failed tasks.`) - repairRunCmd.Flags().BoolVar(&repairRunReq.RerunDependentTasks, "rerun-dependent-tasks", repairRunReq.RerunDependentTasks, `If true, repair all tasks that depend on the tasks in rerun_tasks, even if they were 
previously successful.`) + cmd.Flags().BoolVar(&repairRunReq.RerunAllFailedTasks, "rerun-all-failed-tasks", repairRunReq.RerunAllFailedTasks, `If true, repair all failed tasks.`) + cmd.Flags().BoolVar(&repairRunReq.RerunDependentTasks, "rerun-dependent-tasks", repairRunReq.RerunDependentTasks, `If true, repair all tasks that depend on the tasks in rerun_tasks, even if they were previously successful.`) // TODO: array: rerun_tasks // TODO: array: spark_submit_params // TODO: map via StringToStringVar: sql_params -} - -var repairRunCmd = &cobra.Command{ - Use: "repair-run RUN_ID", - Short: `Repair a job run.`, - Long: `Repair a job run. + cmd.Use = "repair-run RUN_ID" + cmd.Short = `Repair a job run.` + cmd.Long = `Repair a job run. Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job and task settings, and can be viewed in the history - for the original job run.`, + for the original job run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -780,23 +956,6 @@ var repairRunCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The job run ID of the run to repair") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the job run id of the run to repair") - } _, err = fmt.Sscan(args[0], &repairRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -827,34 +986,55 @@ var repairRunCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range repairRunOverrides { + fn(cmd, &repairRunReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRepairRun()) + }) } // start reset command -var resetReq jobs.ResetJob -var resetJson flags.JsonFlag -func init() { - Cmd.AddCommand(resetCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resetOverrides []func( + *cobra.Command, + *jobs.ResetJob, +) + +func newReset() *cobra.Command { + cmd := &cobra.Command{} + + var resetReq jobs.ResetJob + var resetJson flags.JsonFlag + // TODO: short flags - resetCmd.Flags().Var(&resetJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&resetJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var resetCmd = &cobra.Command{ - Use: "reset", - Short: `Overwrites all settings for a job.`, - Long: `Overwrites all settings for a job. + cmd.Use = "reset" + cmd.Short = `Overwrites all settings for a job.` + cmd.Long = `Overwrites all settings for a job. 
Overwrites all the settings for a specific job. Use the Update endpoint to - update job settings partially.`, + update job settings partially.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -872,29 +1052,51 @@ var resetCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resetOverrides { + fn(cmd, &resetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReset()) + }) } // start run-now command -var runNowReq jobs.RunNow -var runNowJson flags.JsonFlag -var runNowSkipWait bool -var runNowTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var runNowOverrides []func( + *cobra.Command, + *jobs.RunNow, +) -func init() { - Cmd.AddCommand(runNowCmd) +func newRunNow() *cobra.Command { + cmd := &cobra.Command{} - runNowCmd.Flags().BoolVar(&runNowSkipWait, "no-wait", runNowSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - runNowCmd.Flags().DurationVar(&runNowTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var runNowReq jobs.RunNow + var runNowJson flags.JsonFlag + + var runNowSkipWait bool + var runNowTimeout time.Duration + + cmd.Flags().BoolVar(&runNowSkipWait, "no-wait", runNowSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&runNowTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - runNowCmd.Flags().Var(&runNowJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&runNowJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: dbt_commands - runNowCmd.Flags().StringVar(&runNowReq.IdempotencyToken, "idempotency-token", runNowReq.IdempotencyToken, `An optional token to guarantee the idempotency of job run requests.`) + cmd.Flags().StringVar(&runNowReq.IdempotencyToken, "idempotency-token", runNowReq.IdempotencyToken, `An optional token to guarantee the idempotency of job run requests.`) // TODO: array: jar_params // TODO: array: job_parameters // TODO: map via StringToStringVar: notebook_params @@ -904,18 +1106,24 @@ func init() { // TODO: array: spark_submit_params // TODO: map via StringToStringVar: sql_params -} - -var runNowCmd = &cobra.Command{ - Use: "run-now JOB_ID", - Short: `Trigger a new job run.`, - Long: `Trigger a new job run. + cmd.Use = "run-now JOB_ID" + cmd.Short = `Trigger a new job run.` + cmd.Long = `Trigger a new job run. 
- Run a job and return the run_id of the triggered run.`, + Run a job and return the run_id of the triggered run.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -925,23 +1133,6 @@ var runNowCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the job to be executed") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the job to be executed") - } _, err = fmt.Sscan(args[0], &runNowReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -972,60 +1163,81 @@ var runNowCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range runNowOverrides { + fn(cmd, &runNowReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRunNow()) + }) } // start submit command -var submitReq jobs.SubmitRun -var submitJson flags.JsonFlag -var submitSkipWait bool -var submitTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var submitOverrides []func( + *cobra.Command, + *jobs.SubmitRun, +) -func init() { - Cmd.AddCommand(submitCmd) +func newSubmit() *cobra.Command { + cmd := &cobra.Command{} - submitCmd.Flags().BoolVar(&submitSkipWait, "no-wait", submitSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) - submitCmd.Flags().DurationVar(&submitTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) + var submitReq jobs.SubmitRun + var submitJson flags.JsonFlag + + var submitSkipWait bool + var submitTimeout time.Duration + + cmd.Flags().BoolVar(&submitSkipWait, "no-wait", submitSkipWait, `do not wait to reach TERMINATED or SKIPPED state`) + cmd.Flags().DurationVar(&submitTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach TERMINATED or SKIPPED state`) // TODO: short flags - submitCmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&submitJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list // TODO: complex arg: email_notifications // TODO: complex arg: git_source // TODO: complex arg: health - submitCmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, `An optional token that can be used to guarantee the idempotency of job run requests.`) + cmd.Flags().StringVar(&submitReq.IdempotencyToken, "idempotency-token", submitReq.IdempotencyToken, 
`An optional token that can be used to guarantee the idempotency of job run requests.`) // TODO: complex arg: notification_settings - submitCmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) + cmd.Flags().StringVar(&submitReq.RunName, "run-name", submitReq.RunName, `An optional name for the run.`) // TODO: array: tasks - submitCmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) + cmd.Flags().IntVar(&submitReq.TimeoutSeconds, "timeout-seconds", submitReq.TimeoutSeconds, `An optional timeout applied to each run of this job.`) // TODO: complex arg: webhook_notifications -} - -var submitCmd = &cobra.Command{ - Use: "submit", - Short: `Create and trigger a one-time run.`, - Long: `Create and trigger a one-time run. + cmd.Use = "submit" + cmd.Short = `Create and trigger a one-time run.` + cmd.Long = `Create and trigger a one-time run. Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job. Runs submitted using this endpoint don’t display in the UI. Use the jobs/runs/get API to check the run state after the job is - submitted.`, + submitted.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1061,37 +1273,66 @@ var submitCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range submitOverrides { + fn(cmd, &submitReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSubmit()) + }) } // start update command -var updateReq jobs.UpdateJob -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *jobs.UpdateJob, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq jobs.UpdateJob + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: fields_to_remove // TODO: complex arg: new_settings -} - -var updateCmd = &cobra.Command{ - Use: "update JOB_ID", - Short: `Partially update a job.`, - Long: `Partially update a job. + cmd.Use = "update JOB_ID" + cmd.Short = `Partially update a job.` + cmd.Long = `Partially update a job. Add, update, or remove specific settings of an existing job. 
Use the ResetJob - to overwrite all job settings.`, + to overwrite all job settings.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1101,23 +1342,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." - names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to update") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the canonical identifier of the job to update") - } _, err = fmt.Sscan(args[0], &updateReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -1129,10 +1353,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Jobs diff --git a/cmd/workspace/jobs/overrides.go b/cmd/workspace/jobs/overrides.go index 93512c84a..fd22dcbdb 100644 --- a/cmd/workspace/jobs/overrides.go +++ b/cmd/workspace/jobs/overrides.go @@ -1,14 +1,25 @@ package jobs -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *jobs.ListJobsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{green "%d" .JobId}} {{.Settings.Name}} {{end}}`) +} +func listRunsOverride(listRunsCmd *cobra.Command, listRunsReq *jobs.ListRunsRequest) { listRunsCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Job ID"}} {{header "Run ID"}} {{header "Result State"}} URL {{range .}}{{green "%d" .JobId}} {{cyan "%d" .RunId}} {{if eq .State.ResultState "SUCCESS"}}{{"SUCCESS"|green}}{{else}}{{red "%s" .State.ResultState}}{{end}} {{.RunPageUrl}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) + listRunsOverrides = append(listRunsOverrides, listRunsOverride) +} diff --git a/cmd/workspace/libraries/libraries.go b/cmd/workspace/libraries/libraries.go index 70b5584ab..e230bfb02 100755 --- a/cmd/workspace/libraries/libraries.go +++ b/cmd/workspace/libraries/libraries.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "libraries", - Short: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.`, - Long: `The Libraries API allows you to install and uninstall libraries and get the +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "libraries", + Short: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster.`, + Long: `The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster. To make third-party or custom code available to notebooks and jobs running on @@ -35,30 +40,43 @@ var Cmd = &cobra.Command{ When you uninstall a library from a cluster, the library is removed only when you restart the cluster. Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start all-cluster-statuses command -func init() { - Cmd.AddCommand(allClusterStatusesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var allClusterStatusesOverrides []func( + *cobra.Command, +) -} +func newAllClusterStatuses() *cobra.Command { + cmd := &cobra.Command{} -var allClusterStatusesCmd = &cobra.Command{ - Use: "all-cluster-statuses", - Short: `Get all statuses.`, - Long: `Get all statuses. + cmd.Use = "all-cluster-statuses" + cmd.Short = `Get all statuses.` + cmd.Long = `Get all statuses. Get the status of all libraries on all clusters. 
A status will be available for all libraries installed on this cluster via the API or the libraries UI as - well as libraries set to be installed on all clusters via the libraries UI.`, + well as libraries set to be installed on all clusters via the libraries UI.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Libraries.AllClusterStatuses(ctx) @@ -66,25 +84,45 @@ var allClusterStatusesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range allClusterStatusesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAllClusterStatuses()) + }) } // start cluster-status command -var clusterStatusReq compute.ClusterStatusRequest -func init() { - Cmd.AddCommand(clusterStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var clusterStatusOverrides []func( + *cobra.Command, + *compute.ClusterStatusRequest, +) + +func newClusterStatus() *cobra.Command { + cmd := &cobra.Command{} + + var clusterStatusReq compute.ClusterStatusRequest + // TODO: short flags -} - -var clusterStatusCmd = &cobra.Command{ - Use: "cluster-status CLUSTER_ID", - Short: `Get status.`, - Long: `Get status. 
+ cmd.Use = "cluster-status CLUSTER_ID" + cmd.Short = `Get status.` + cmd.Long = `Get status. Get the status of libraries on a cluster. A status will be available for all libraries installed on this cluster via the API or the libraries UI as well as @@ -100,15 +138,17 @@ var clusterStatusCmd = &cobra.Command{ 3. Libraries that were previously requested on this cluster or on all clusters, but now marked for removal. Within this group there is no order - guarantee.`, + guarantee.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -119,38 +159,59 @@ var clusterStatusCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range clusterStatusOverrides { + fn(cmd, &clusterStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newClusterStatus()) + }) } // start install command -var installReq compute.InstallLibraries -var installJson flags.JsonFlag -func init() { - Cmd.AddCommand(installCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var installOverrides []func( + *cobra.Command, + *compute.InstallLibraries, +) + +func newInstall() *cobra.Command { + cmd := &cobra.Command{} + + var installReq compute.InstallLibraries + var installJson flags.JsonFlag + // TODO: short flags - installCmd.Flags().Var(&installJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&installJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var installCmd = &cobra.Command{ - Use: "install", - Short: `Add a library.`, - Long: `Add a library. + cmd.Use = "install" + cmd.Short = `Add a library.` + cmd.Long = `Add a library. Add libraries to be installed on a cluster. The installation is asynchronous; it happens in the background after the completion of this request. **Note**: The actual set of libraries to be installed on a cluster is the union of the libraries specified via this method and the libraries set to be - installed on all clusters via the libraries UI.`, + installed on all clusters via the libraries UI.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -168,35 +229,56 @@ var installCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range installOverrides { + fn(cmd, &installReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newInstall()) + }) } // start uninstall command -var uninstallReq compute.UninstallLibraries -var uninstallJson flags.JsonFlag -func init() { - Cmd.AddCommand(uninstallCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var uninstallOverrides []func( + *cobra.Command, + *compute.UninstallLibraries, +) + +func newUninstall() *cobra.Command { + cmd := &cobra.Command{} + + var uninstallReq compute.UninstallLibraries + var uninstallJson flags.JsonFlag + // TODO: short flags - uninstallCmd.Flags().Var(&uninstallJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&uninstallJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var uninstallCmd = &cobra.Command{ - Use: "uninstall", - Short: `Uninstall libraries.`, - Long: `Uninstall libraries. + cmd.Use = "uninstall" + cmd.Short = `Uninstall libraries.` + cmd.Long = `Uninstall libraries. Set libraries to be uninstalled on a cluster. The libraries won't be uninstalled until the cluster is restarted. 
Uninstalling libraries that are - not installed on the cluster will have no impact but is not an error.`, + not installed on the cluster will have no impact but is not an error.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -214,10 +296,24 @@ var uninstallCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range uninstallOverrides { + fn(cmd, &uninstallReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUninstall()) + }) } // end service Libraries diff --git a/cmd/workspace/metastores/metastores.go b/cmd/workspace/metastores/metastores.go index 9db023a77..4bed9fd17 100755 --- a/cmd/workspace/metastores/metastores.go +++ b/cmd/workspace/metastores/metastores.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "metastores", - Short: `A metastore is the top-level container of objects in Unity Catalog.`, - Long: `A metastore is the top-level container of objects in Unity Catalog. It stores +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "metastores", + Short: `A metastore is the top-level container of objects in Unity Catalog.`, + Long: `A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to @@ -28,36 +33,53 @@ var Cmd = &cobra.Command{ workspaces created before Unity Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is available in a catalog named hive_metastore.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start assign command -var assignReq catalog.CreateMetastoreAssignment -func init() { - Cmd.AddCommand(assignCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var assignOverrides []func( + *cobra.Command, + *catalog.CreateMetastoreAssignment, +) + +func newAssign() *cobra.Command { + cmd := &cobra.Command{} + + var assignReq catalog.CreateMetastoreAssignment + // TODO: short flags -} - -var assignCmd = &cobra.Command{ - Use: "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID", - Short: `Create an assignment.`, - Long: `Create an assignment. + cmd.Use = "assign METASTORE_ID DEFAULT_CATALOG_NAME WORKSPACE_ID" + cmd.Short = `Create an assignment.` + cmd.Long = `Create an assignment. Creates a new metastore assignment. 
If an assignment for the same __workspace_id__ exists, it will be overwritten by the new __metastore_id__ - and __default_catalog_name__. The caller must be an account admin.`, + and __default_catalog_name__. The caller must be an account admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -73,42 +95,64 @@ var assignCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range assignOverrides { + fn(cmd, &assignReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newAssign()) + }) } // start create command -var createReq catalog.CreateMetastore -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateMetastore, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateMetastore + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `Cloud region which the metastore serves (e.g., us-west-2, westus).`) + cmd.Flags().StringVar(&createReq.Region, "region", createReq.Region, `Cloud region which the metastore serves (e.g., us-west-2, westus).`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME STORAGE_ROOT", - Short: `Create a metastore.`, - Long: `Create a metastore. + cmd.Use = "create NAME STORAGE_ROOT" + cmd.Short = `Create a metastore.` + cmd.Long = `Create a metastore. - Creates a new metastore based on a provided name and storage root path.`, + Creates a new metastore based on a provided name and storage root path.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -127,29 +171,47 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start current command -func init() { - Cmd.AddCommand(currentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var currentOverrides []func( + *cobra.Command, +) -} +func newCurrent() *cobra.Command { + cmd := &cobra.Command{} -var currentCmd = &cobra.Command{ - Use: "current", - Short: `Get metastore assignment for workspace.`, - Long: `Get metastore assignment for workspace. + cmd.Use = "current" + cmd.Short = `Get metastore assignment for workspace.` + cmd.Long = `Get metastore assignment for workspace. - Gets the metastore assignment for the workspace being accessed.`, + Gets the metastore assignment for the workspace being accessed.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.Current(ctx) @@ -157,53 +219,62 @@ var currentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range currentOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCurrent()) + }) } // start delete command -var deleteReq catalog.DeleteMetastoreRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteMetastoreRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteMetastoreRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if the metastore is not empty.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a metastore.`, - Long: `Delete a metastore. + cmd.Use = "delete ID" + cmd.Short = `Delete a metastore.` + cmd.Long = `Delete a metastore. - Deletes a metastore. The caller must be a metastore admin.`, + Deletes a metastore. The caller must be a metastore admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." 
- names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id of the metastore") - } deleteReq.Id = args[0] err = w.Metastores.Delete(ctx, deleteReq) @@ -211,43 +282,65 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start enable-optimization command -var enableOptimizationReq catalog.UpdatePredictiveOptimization -var enableOptimizationJson flags.JsonFlag -func init() { - Cmd.AddCommand(enableOptimizationCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var enableOptimizationOverrides []func( + *cobra.Command, + *catalog.UpdatePredictiveOptimization, +) + +func newEnableOptimization() *cobra.Command { + cmd := &cobra.Command{} + + var enableOptimizationReq catalog.UpdatePredictiveOptimization + var enableOptimizationJson flags.JsonFlag + // TODO: short flags - enableOptimizationCmd.Flags().Var(&enableOptimizationJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&enableOptimizationJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var enableOptimizationCmd = &cobra.Command{ - Use: "enable-optimization METASTORE_ID ENABLE", - Short: `Toggle predictive optimization on the metastore.`, - Long: `Toggle predictive optimization on the metastore. + cmd.Use = "enable-optimization METASTORE_ID ENABLE" + cmd.Short = `Toggle predictive optimization on the metastore.` + cmd.Long = `Toggle predictive optimization on the metastore. - Enables or disables predictive optimization on the metastore.`, + Enables or disables predictive optimization on the metastore.` // This command is being previewed; hide from help output. - Hidden: true, + cmd.Hidden = true - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -269,52 +362,61 @@ var enableOptimizationCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range enableOptimizationOverrides { + fn(cmd, &enableOptimizationReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEnableOptimization()) + }) } // start get command -var getReq catalog.GetMetastoreRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetMetastoreRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetMetastoreRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get a metastore.`, - Long: `Get a metastore. + cmd.Use = "get ID" + cmd.Short = `Get a metastore.` + cmd.Long = `Get a metastore. Gets a metastore that matches the supplied ID. The caller must be a metastore - admin to retrieve this info.`, + admin to retrieve this info.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." 
- names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id of the metastore") - } getReq.Id = args[0] response, err := w.Metastores.Get(ctx, getReq) @@ -322,31 +424,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List metastores.`, - Long: `List metastores. + cmd.Use = "list" + cmd.Short = `List metastores.` + cmd.Long = `List metastores. Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin to retrieve this info. 
There is no guarantee of a - specific ordering of the elements in the array.`, + specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.ListAll(ctx) @@ -354,30 +474,48 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start summary command -func init() { - Cmd.AddCommand(summaryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var summaryOverrides []func( + *cobra.Command, +) -} +func newSummary() *cobra.Command { + cmd := &cobra.Command{} -var summaryCmd = &cobra.Command{ - Use: "summary", - Short: `Get a metastore summary.`, - Long: `Get a metastore summary. + cmd.Use = "summary" + cmd.Short = `Get a metastore summary.` + cmd.Long = `Get a metastore summary. Gets information about a metastore. 
This summary includes the storage - credential, the cloud vendor, the cloud region, and the global metastore ID.`, + credential, the cloud vendor, the cloud region, and the global metastore ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Metastores.Summary(ctx) @@ -385,35 +523,57 @@ var summaryCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range summaryOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSummary()) + }) } // start unassign command -var unassignReq catalog.UnassignRequest -func init() { - Cmd.AddCommand(unassignCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var unassignOverrides []func( + *cobra.Command, + *catalog.UnassignRequest, +) + +func newUnassign() *cobra.Command { + cmd := &cobra.Command{} + + var unassignReq catalog.UnassignRequest + // TODO: short flags -} - -var unassignCmd = &cobra.Command{ - Use: "unassign WORKSPACE_ID METASTORE_ID", - Short: `Delete an assignment.`, - Long: `Delete an assignment. + cmd.Use = "unassign WORKSPACE_ID METASTORE_ID" + cmd.Short = `Delete an assignment.` + cmd.Long = `Delete an assignment. - Deletes a metastore assignment. 
The caller must be an account administrator.`, + Deletes a metastore assignment. The caller must be an account administrator.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -428,60 +588,69 @@ var unassignCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range unassignOverrides { + fn(cmd, &unassignReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUnassign()) + }) } // start update command -var updateReq catalog.UpdateMetastore -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdateMetastore, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateMetastore + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.DeltaSharingOrganizationName, "delta-sharing-organization-name", updateReq.DeltaSharingOrganizationName, `The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.`) - updateCmd.Flags().Int64Var(&updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, "delta-sharing-recipient-token-lifetime-in-seconds", updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, `The lifetime of delta sharing recipient token in seconds.`) - updateCmd.Flags().Var(&updateReq.DeltaSharingScope, "delta-sharing-scope", `The scope of Delta Sharing enabled for the metastore.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The user-specified name of the metastore.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the metastore.`) - updateCmd.Flags().StringVar(&updateReq.PrivilegeModelVersion, "privilege-model-version", updateReq.PrivilegeModelVersion, `Privilege model version of the metastore, of the form major.minor (e.g., 1.0).`) - updateCmd.Flags().StringVar(&updateReq.StorageRootCredentialId, "storage-root-credential-id", updateReq.StorageRootCredentialId, `UUID of storage credential to access the metastore storage_root.`) + cmd.Flags().StringVar(&updateReq.DeltaSharingOrganizationName, "delta-sharing-organization-name", updateReq.DeltaSharingOrganizationName, `The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta Sharing as the official name.`) + cmd.Flags().Int64Var(&updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, "delta-sharing-recipient-token-lifetime-in-seconds", updateReq.DeltaSharingRecipientTokenLifetimeInSeconds, `The lifetime of delta sharing recipient token in 
seconds.`) + cmd.Flags().Var(&updateReq.DeltaSharingScope, "delta-sharing-scope", `The scope of Delta Sharing enabled for the metastore.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The user-specified name of the metastore.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the metastore.`) + cmd.Flags().StringVar(&updateReq.PrivilegeModelVersion, "privilege-model-version", updateReq.PrivilegeModelVersion, `Privilege model version of the metastore, of the form major.minor (e.g., 1.0).`) + cmd.Flags().StringVar(&updateReq.StorageRootCredentialId, "storage-root-credential-id", updateReq.StorageRootCredentialId, `UUID of storage credential to access the metastore storage_root.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Update a metastore.`, - Long: `Update a metastore. + cmd.Use = "update ID" + cmd.Short = `Update a metastore.` + cmd.Long = `Update a metastore. Updates information for a specific metastore. The caller must be a metastore - admin.`, + admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." - names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id of the metastore") - } updateReq.Id = args[0] response, err := w.Metastores.Update(ctx, updateReq) @@ -489,57 +658,66 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start update-assignment command -var updateAssignmentReq catalog.UpdateMetastoreAssignment -func init() { - Cmd.AddCommand(updateAssignmentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateAssignmentOverrides []func( + *cobra.Command, + *catalog.UpdateMetastoreAssignment, +) + +func newUpdateAssignment() *cobra.Command { + cmd := &cobra.Command{} + + var updateAssignmentReq catalog.UpdateMetastoreAssignment + // TODO: short flags - updateAssignmentCmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`) - updateAssignmentCmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`) + cmd.Flags().StringVar(&updateAssignmentReq.DefaultCatalogName, "default-catalog-name", updateAssignmentReq.DefaultCatalogName, `The name of the default catalog for the metastore.`) + cmd.Flags().StringVar(&updateAssignmentReq.MetastoreId, "metastore-id", updateAssignmentReq.MetastoreId, `The unique ID of the metastore.`) -} - -var updateAssignmentCmd = &cobra.Command{ - Use: "update-assignment WORKSPACE_ID", - Short: `Update an assignment.`, - Long: `Update an assignment. + cmd.Use = "update-assignment WORKSPACE_ID" + cmd.Short = `Update an assignment.` + cmd.Long = `Update an assignment. Updates a metastore assignment. This operation can be used to update __metastore_id__ or __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore. 
The caller must be an account admin - to update __metastore_id__; otherwise, the caller can be a Workspace admin.`, + to update __metastore_id__; otherwise, the caller can be a Workspace admin.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Metastores drop-down." - names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "A workspace ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have a workspace id") - } _, err = fmt.Sscan(args[0], &updateAssignmentReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -550,10 +728,24 @@ var updateAssignmentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateAssignmentOverrides { + fn(cmd, &updateAssignmentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateAssignment()) + }) } // end service Metastores diff --git a/cmd/workspace/metastores/overrides.go b/cmd/workspace/metastores/overrides.go index 9d1c23ac2..2c9ca6f79 100644 --- a/cmd/workspace/metastores/overrides.go +++ b/cmd/workspace/metastores/overrides.go @@ -1,10 +1,17 @@ package metastores -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{"Region"}} {{range .}}{{.MetastoreId|green}} {{.Name|cyan}} {{.Region}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/model-registry/model-registry.go b/cmd/workspace/model-registry/model-registry.go index ce7f4806c..d944e0d98 100755 --- a/cmd/workspace/model-registry/model-registry.go +++ b/cmd/workspace/model-registry/model-registry.go @@ -12,46 +12,68 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "model-registry", - Short: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, - Long: `MLflow Model Registry is a centralized model repository and a UI and set of +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "model-registry", + Short: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, + Long: `MLflow Model Registry is a centralized model repository and a UI and set of APIs that enable you to manage the full lifecycle of MLflow Models.`, - Annotations: map[string]string{ - "package": "ml", - }, + GroupID: "ml", + Annotations: map[string]string{ + "package": "ml", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start approve-transition-request command -var approveTransitionRequestReq ml.ApproveTransitionRequest -var approveTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(approveTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var approveTransitionRequestOverrides []func( + *cobra.Command, + *ml.ApproveTransitionRequest, +) + +func newApproveTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var approveTransitionRequestReq ml.ApproveTransitionRequest + var approveTransitionRequestJson flags.JsonFlag + // TODO: short flags - approveTransitionRequestCmd.Flags().Var(&approveTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&approveTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - approveTransitionRequestCmd.Flags().StringVar(&approveTransitionRequestReq.Comment, "comment", approveTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&approveTransitionRequestReq.Comment, "comment", approveTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var approveTransitionRequestCmd = &cobra.Command{ - Use: "approve-transition-request NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS", - Short: `Approve transition request.`, - Long: `Approve transition request. + cmd.Use = "approve-transition-request NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS" + cmd.Short = `Approve transition request.` + cmd.Long = `Approve transition request. 
- Approves a model version stage transition request.`, + Approves a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -78,42 +100,64 @@ var approveTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range approveTransitionRequestOverrides { + fn(cmd, &approveTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newApproveTransitionRequest()) + }) } // start create-comment command -var createCommentReq ml.CreateComment -var createCommentJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createCommentOverrides []func( + *cobra.Command, + *ml.CreateComment, +) + +func newCreateComment() *cobra.Command { + cmd := &cobra.Command{} + + var createCommentReq ml.CreateComment + var createCommentJson flags.JsonFlag + // TODO: short flags - createCommentCmd.Flags().Var(&createCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCommentCmd = &cobra.Command{ - Use: "create-comment NAME VERSION COMMENT", - Short: `Post a comment.`, - Long: `Post a comment. + cmd.Use = "create-comment NAME VERSION COMMENT" + cmd.Short = `Post a comment.` + cmd.Long = `Post a comment. Posts a comment on a model version. A comment can be submitted either by a user or programmatically to display relevant information about the model. For - example, test results or deployment errors.`, + example, test results or deployment errors.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -133,46 +177,68 @@ var createCommentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createCommentOverrides { + fn(cmd, &createCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateComment()) + }) } // start create-model command -var createModelReq ml.CreateModelRequest -var createModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(createModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createModelOverrides []func( + *cobra.Command, + *ml.CreateModelRequest, +) + +func newCreateModel() *cobra.Command { + cmd := &cobra.Command{} + + var createModelReq ml.CreateModelRequest + var createModelJson flags.JsonFlag + // TODO: short flags - createModelCmd.Flags().Var(&createModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createModelCmd.Flags().StringVar(&createModelReq.Description, "description", createModelReq.Description, `Optional description for registered model.`) + cmd.Flags().StringVar(&createModelReq.Description, "description", createModelReq.Description, `Optional description for registered model.`) // TODO: array: tags -} - -var createModelCmd = &cobra.Command{ - Use: "create-model NAME", - Short: `Create a model.`, - Long: `Create a model. + cmd.Use = "create-model NAME" + cmd.Short = `Create a model.` + cmd.Long = `Create a model. Creates a new registered model with the name specified in the request body. 
Throws RESOURCE_ALREADY_EXISTS if a registered model with the given name - exists.`, + exists.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -190,45 +256,67 @@ var createModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createModelOverrides { + fn(cmd, &createModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateModel()) + }) } // start create-model-version command -var createModelVersionReq ml.CreateModelVersionRequest -var createModelVersionJson flags.JsonFlag -func init() { - Cmd.AddCommand(createModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createModelVersionOverrides []func( + *cobra.Command, + *ml.CreateModelVersionRequest, +) + +func newCreateModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var createModelVersionReq ml.CreateModelVersionRequest + var createModelVersionJson flags.JsonFlag + // TODO: short flags - createModelVersionCmd.Flags().Var(&createModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.Description, "description", createModelVersionReq.Description, `Optional description for model version.`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.RunId, "run-id", createModelVersionReq.RunId, `MLflow run ID for correlation, if source was generated by an experiment run in MLflow tracking server.`) - createModelVersionCmd.Flags().StringVar(&createModelVersionReq.RunLink, "run-link", createModelVersionReq.RunLink, `MLflow run link - this is the exact link of the run that generated this model version, potentially hosted at another instance of MLflow.`) + cmd.Flags().StringVar(&createModelVersionReq.Description, "description", createModelVersionReq.Description, `Optional description for model version.`) + cmd.Flags().StringVar(&createModelVersionReq.RunId, "run-id", createModelVersionReq.RunId, `MLflow run ID for correlation, if source was generated by an experiment run in MLflow tracking server.`) + cmd.Flags().StringVar(&createModelVersionReq.RunLink, "run-link", createModelVersionReq.RunLink, `MLflow run link - this is the exact link of the run that generated this model version, potentially hosted at another instance of MLflow.`) // TODO: array: tags -} - -var createModelVersionCmd = &cobra.Command{ - Use: "create-model-version NAME SOURCE", - Short: `Create a model version.`, - Long: `Create a model version. 
+ cmd.Use = "create-model-version NAME SOURCE" + cmd.Short = `Create a model version.` + cmd.Long = `Create a model version. - Creates a model version.`, + Creates a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -247,42 +335,64 @@ var createModelVersionCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createModelVersionOverrides { + fn(cmd, &createModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateModelVersion()) + }) } // start create-transition-request command -var createTransitionRequestReq ml.CreateTransitionRequest -var createTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(createTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createTransitionRequestOverrides []func( + *cobra.Command, + *ml.CreateTransitionRequest, +) + +func newCreateTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var createTransitionRequestReq ml.CreateTransitionRequest + var createTransitionRequestJson flags.JsonFlag + // TODO: short flags - createTransitionRequestCmd.Flags().Var(&createTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createTransitionRequestCmd.Flags().StringVar(&createTransitionRequestReq.Comment, "comment", createTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&createTransitionRequestReq.Comment, "comment", createTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var createTransitionRequestCmd = &cobra.Command{ - Use: "create-transition-request NAME VERSION STAGE", - Short: `Make a transition request.`, - Long: `Make a transition request. + cmd.Use = "create-transition-request NAME VERSION STAGE" + cmd.Short = `Make a transition request.` + cmd.Long = `Make a transition request. 
- Creates a model version stage transition request.`, + Creates a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -305,41 +415,62 @@ var createTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createTransitionRequestOverrides { + fn(cmd, &createTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateTransitionRequest()) + }) } // start create-webhook command -var createWebhookReq ml.CreateRegistryWebhook -var createWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(createWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createWebhookOverrides []func( + *cobra.Command, + *ml.CreateRegistryWebhook, +) + +func newCreateWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var createWebhookReq ml.CreateRegistryWebhook + var createWebhookJson flags.JsonFlag + // TODO: short flags - createWebhookCmd.Flags().Var(&createWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createWebhookCmd.Flags().StringVar(&createWebhookReq.Description, "description", createWebhookReq.Description, `User-specified description for the webhook.`) + cmd.Flags().StringVar(&createWebhookReq.Description, "description", createWebhookReq.Description, `User-specified description for the webhook.`) // TODO: complex arg: http_url_spec // TODO: complex arg: job_spec - createWebhookCmd.Flags().StringVar(&createWebhookReq.ModelName, "model-name", createWebhookReq.ModelName, `Name of the model whose events would trigger this webhook.`) - createWebhookCmd.Flags().Var(&createWebhookReq.Status, "status", `This describes an enum.`) + cmd.Flags().StringVar(&createWebhookReq.ModelName, "model-name", createWebhookReq.ModelName, `Name of the model whose events would trigger this webhook.`) + cmd.Flags().Var(&createWebhookReq.Status, "status", `This describes an enum.`) -} - -var createWebhookCmd = &cobra.Command{ - Use: "create-webhook", - Short: `Create a webhook.`, - Long: `Create a webhook. + cmd.Use = "create-webhook" + cmd.Short = `Create a webhook.` + cmd.Long = `Create a webhook. **NOTE**: This endpoint is in Public Preview. 
- Creates a registry webhook.`, + Creates a registry webhook.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -357,35 +488,57 @@ var createWebhookCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createWebhookOverrides { + fn(cmd, &createWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateWebhook()) + }) } // start delete-comment command -var deleteCommentReq ml.DeleteCommentRequest -func init() { - Cmd.AddCommand(deleteCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteCommentOverrides []func( + *cobra.Command, + *ml.DeleteCommentRequest, +) + +func newDeleteComment() *cobra.Command { + cmd := &cobra.Command{} + + var deleteCommentReq ml.DeleteCommentRequest + // TODO: short flags -} - -var deleteCommentCmd = &cobra.Command{ - Use: "delete-comment ID", - Short: `Delete a comment.`, - Long: `Delete a comment. + cmd.Use = "delete-comment ID" + cmd.Short = `Delete a comment.` + cmd.Long = `Delete a comment. 
- Deletes a comment on a model version.`, + Deletes a comment on a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -396,35 +549,57 @@ var deleteCommentCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteCommentOverrides { + fn(cmd, &deleteCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteComment()) + }) } // start delete-model command -var deleteModelReq ml.DeleteModelRequest -func init() { - Cmd.AddCommand(deleteModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelOverrides []func( + *cobra.Command, + *ml.DeleteModelRequest, +) + +func newDeleteModel() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelReq ml.DeleteModelRequest + // TODO: short flags -} - -var deleteModelCmd = &cobra.Command{ - Use: "delete-model NAME", - Short: `Delete a model.`, - Long: `Delete a model. + cmd.Use = "delete-model NAME" + cmd.Short = `Delete a model.` + cmd.Long = `Delete a model. 
- Deletes a registered model.`, + Deletes a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -435,35 +610,57 @@ var deleteModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelOverrides { + fn(cmd, &deleteModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModel()) + }) } // start delete-model-tag command -var deleteModelTagReq ml.DeleteModelTagRequest -func init() { - Cmd.AddCommand(deleteModelTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelTagOverrides []func( + *cobra.Command, + *ml.DeleteModelTagRequest, +) + +func newDeleteModelTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelTagReq ml.DeleteModelTagRequest + // TODO: short flags -} - -var deleteModelTagCmd = &cobra.Command{ - Use: "delete-model-tag NAME KEY", - Short: `Delete a model tag.`, - Long: `Delete a model tag. + cmd.Use = "delete-model-tag NAME KEY" + cmd.Short = `Delete a model tag.` + cmd.Long = `Delete a model tag. 
- Deletes the tag for a registered model.`, + Deletes the tag for a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -475,35 +672,57 @@ var deleteModelTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelTagOverrides { + fn(cmd, &deleteModelTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelTag()) + }) } // start delete-model-version command -var deleteModelVersionReq ml.DeleteModelVersionRequest -func init() { - Cmd.AddCommand(deleteModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteModelVersionOverrides []func( + *cobra.Command, + *ml.DeleteModelVersionRequest, +) + +func newDeleteModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelVersionReq ml.DeleteModelVersionRequest + // TODO: short flags -} - -var deleteModelVersionCmd = &cobra.Command{ - Use: "delete-model-version NAME VERSION", - Short: `Delete a model version.`, - Long: `Delete a model version. 
+ cmd.Use = "delete-model-version NAME VERSION" + cmd.Short = `Delete a model version.` + cmd.Long = `Delete a model version. - Deletes a model version.`, + Deletes a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -515,35 +734,57 @@ var deleteModelVersionCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteModelVersionOverrides { + fn(cmd, &deleteModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelVersion()) + }) } // start delete-model-version-tag command -var deleteModelVersionTagReq ml.DeleteModelVersionTagRequest -func init() { - Cmd.AddCommand(deleteModelVersionTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteModelVersionTagOverrides []func( + *cobra.Command, + *ml.DeleteModelVersionTagRequest, +) + +func newDeleteModelVersionTag() *cobra.Command { + cmd := &cobra.Command{} + + var deleteModelVersionTagReq ml.DeleteModelVersionTagRequest + // TODO: short flags -} - -var deleteModelVersionTagCmd = &cobra.Command{ - Use: "delete-model-version-tag NAME VERSION KEY", - Short: `Delete a model version tag.`, - Long: `Delete a model version tag. + cmd.Use = "delete-model-version-tag NAME VERSION KEY" + cmd.Short = `Delete a model version tag.` + cmd.Long = `Delete a model version tag. - Deletes a model version tag.`, + Deletes a model version tag.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -556,37 +797,59 @@ var deleteModelVersionTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteModelVersionTagOverrides { + fn(cmd, &deleteModelVersionTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteModelVersionTag()) + }) } // start delete-transition-request command -var deleteTransitionRequestReq ml.DeleteTransitionRequestRequest -func init() { - Cmd.AddCommand(deleteTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteTransitionRequestOverrides []func( + *cobra.Command, + *ml.DeleteTransitionRequestRequest, +) + +func newDeleteTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var deleteTransitionRequestReq ml.DeleteTransitionRequestRequest + // TODO: short flags - deleteTransitionRequestCmd.Flags().StringVar(&deleteTransitionRequestReq.Comment, "comment", deleteTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&deleteTransitionRequestReq.Comment, "comment", deleteTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var deleteTransitionRequestCmd = &cobra.Command{ - Use: "delete-transition-request NAME VERSION STAGE CREATOR", - Short: `Delete a transition request.`, - Long: `Delete a transition request. + cmd.Use = "delete-transition-request NAME VERSION STAGE CREATOR" + cmd.Short = `Delete a transition request.` + cmd.Long = `Delete a transition request. 
- Cancels a model version stage transition request.`, + Cancels a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -603,44 +866,66 @@ var deleteTransitionRequestCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteTransitionRequestOverrides { + fn(cmd, &deleteTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteTransitionRequest()) + }) } // start delete-webhook command -var deleteWebhookReq ml.DeleteWebhookRequest -var deleteWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteWebhookOverrides []func( + *cobra.Command, + *ml.DeleteWebhookRequest, +) + +func newDeleteWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var deleteWebhookReq ml.DeleteWebhookRequest + var deleteWebhookJson flags.JsonFlag + // TODO: short flags - deleteWebhookCmd.Flags().Var(&deleteWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - deleteWebhookCmd.Flags().StringVar(&deleteWebhookReq.Id, "id", deleteWebhookReq.Id, `Webhook ID required to delete a registry webhook.`) + cmd.Flags().StringVar(&deleteWebhookReq.Id, "id", deleteWebhookReq.Id, `Webhook ID required to delete a registry webhook.`) -} - -var deleteWebhookCmd = &cobra.Command{ - Use: "delete-webhook", - Short: `Delete a webhook.`, - Long: `Delete a webhook. + cmd.Use = "delete-webhook" + cmd.Short = `Delete a webhook.` + cmd.Long = `Delete a webhook. **NOTE:** This endpoint is in Public Preview. - Deletes a registry webhook.`, + Deletes a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -657,42 +942,64 @@ var deleteWebhookCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteWebhookOverrides { + fn(cmd, &deleteWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteWebhook()) + }) } // start get-latest-versions command -var getLatestVersionsReq ml.GetLatestVersionsRequest -var getLatestVersionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(getLatestVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getLatestVersionsOverrides []func( + *cobra.Command, + *ml.GetLatestVersionsRequest, +) + +func newGetLatestVersions() *cobra.Command { + cmd := &cobra.Command{} + + var getLatestVersionsReq ml.GetLatestVersionsRequest + var getLatestVersionsJson flags.JsonFlag + // TODO: short flags - getLatestVersionsCmd.Flags().Var(&getLatestVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&getLatestVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: stages -} - -var getLatestVersionsCmd = &cobra.Command{ - Use: "get-latest-versions NAME", - Short: `Get the latest version.`, - Long: `Get the latest version. + cmd.Use = "get-latest-versions NAME" + cmd.Short = `Get the latest version.` + cmd.Long = `Get the latest version. 
- Gets the latest version of a registered model.`, + Gets the latest version of a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -710,39 +1017,61 @@ var getLatestVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getLatestVersionsOverrides { + fn(cmd, &getLatestVersionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetLatestVersions()) + }) } // start get-model command -var getModelReq ml.GetModelRequest -func init() { - Cmd.AddCommand(getModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getModelOverrides []func( + *cobra.Command, + *ml.GetModelRequest, +) + +func newGetModel() *cobra.Command { + cmd := &cobra.Command{} + + var getModelReq ml.GetModelRequest + // TODO: short flags -} - -var getModelCmd = &cobra.Command{ - Use: "get-model NAME", - Short: `Get model.`, - Long: `Get model. + cmd.Use = "get-model NAME" + cmd.Short = `Get model.` + cmd.Long = `Get model. Get the details of a model. 
This is a Databricks workspace version of the [MLflow endpoint] that also returns the model's Databricks workspace ID and the permission level of the requesting user on the model. - [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel`, + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -753,35 +1082,57 @@ var getModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getModelOverrides { + fn(cmd, &getModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModel()) + }) } // start get-model-version command -var getModelVersionReq ml.GetModelVersionRequest -func init() { - Cmd.AddCommand(getModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getModelVersionOverrides []func( + *cobra.Command, + *ml.GetModelVersionRequest, +) + +func newGetModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var getModelVersionReq ml.GetModelVersionRequest + // TODO: short flags -} - -var getModelVersionCmd = &cobra.Command{ - Use: "get-model-version NAME VERSION", - Short: `Get a model version.`, - Long: `Get a model version. + cmd.Use = "get-model-version NAME VERSION" + cmd.Short = `Get a model version.` + cmd.Long = `Get a model version. - Get a model version.`, + Get a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -793,35 +1144,57 @@ var getModelVersionCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getModelVersionOverrides { + fn(cmd, &getModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModelVersion()) + }) } // start get-model-version-download-uri command -var getModelVersionDownloadUriReq ml.GetModelVersionDownloadUriRequest -func init() { - Cmd.AddCommand(getModelVersionDownloadUriCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getModelVersionDownloadUriOverrides []func( + *cobra.Command, + *ml.GetModelVersionDownloadUriRequest, +) + +func newGetModelVersionDownloadUri() *cobra.Command { + cmd := &cobra.Command{} + + var getModelVersionDownloadUriReq ml.GetModelVersionDownloadUriRequest + // TODO: short flags -} - -var getModelVersionDownloadUriCmd = &cobra.Command{ - Use: "get-model-version-download-uri NAME VERSION", - Short: `Get a model version URI.`, - Long: `Get a model version URI. + cmd.Use = "get-model-version-download-uri NAME VERSION" + cmd.Short = `Get a model version URI.` + cmd.Long = `Get a model version URI. - Gets a URI to download the model version.`, + Gets a URI to download the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -833,44 +1206,66 @@ var getModelVersionDownloadUriCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getModelVersionDownloadUriOverrides { + fn(cmd, &getModelVersionDownloadUriReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetModelVersionDownloadUri()) + }) } // start list-models command -var listModelsReq ml.ListModelsRequest -var listModelsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listModelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listModelsOverrides []func( + *cobra.Command, + *ml.ListModelsRequest, +) + +func newListModels() *cobra.Command { + cmd := &cobra.Command{} + + var listModelsReq ml.ListModelsRequest + var listModelsJson flags.JsonFlag + // TODO: short flags - listModelsCmd.Flags().Var(&listModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listModelsCmd.Flags().IntVar(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) - listModelsCmd.Flags().StringVar(&listModelsReq.PageToken, "page-token", listModelsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) + cmd.Flags().IntVar(&listModelsReq.MaxResults, "max-results", listModelsReq.MaxResults, `Maximum number of registered models desired.`) + cmd.Flags().StringVar(&listModelsReq.PageToken, "page-token", listModelsReq.PageToken, `Pagination token to go to the next page based on a previous query.`) -} - -var listModelsCmd = &cobra.Command{ - Use: "list-models", - Short: `List models.`, - Long: `List models. + cmd.Use = "list-models" + cmd.Short = `List models.` + cmd.Long = `List models. 
Lists all available registered models, up to the limit specified in - __max_results__.`, + __max_results__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -887,35 +1282,57 @@ var listModelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listModelsOverrides { + fn(cmd, &listModelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListModels()) + }) } // start list-transition-requests command -var listTransitionRequestsReq ml.ListTransitionRequestsRequest -func init() { - Cmd.AddCommand(listTransitionRequestsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listTransitionRequestsOverrides []func( + *cobra.Command, + *ml.ListTransitionRequestsRequest, +) + +func newListTransitionRequests() *cobra.Command { + cmd := &cobra.Command{} + + var listTransitionRequestsReq ml.ListTransitionRequestsRequest + // TODO: short flags -} - -var listTransitionRequestsCmd = &cobra.Command{ - Use: "list-transition-requests NAME VERSION", - Short: `List transition requests.`, - Long: `List transition requests. + cmd.Use = "list-transition-requests NAME VERSION" + cmd.Short = `List transition requests.` + cmd.Long = `List transition requests. - Gets a list of all open stage transition requests for the model version.`, + Gets a list of all open stage transition requests for the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -927,46 +1344,68 @@ var listTransitionRequestsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listTransitionRequestsOverrides { + fn(cmd, &listTransitionRequestsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListTransitionRequests()) + }) } // start list-webhooks command -var listWebhooksReq ml.ListWebhooksRequest -var listWebhooksJson flags.JsonFlag -func init() { - Cmd.AddCommand(listWebhooksCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listWebhooksOverrides []func( + *cobra.Command, + *ml.ListWebhooksRequest, +) + +func newListWebhooks() *cobra.Command { + cmd := &cobra.Command{} + + var listWebhooksReq ml.ListWebhooksRequest + var listWebhooksJson flags.JsonFlag + // TODO: short flags - listWebhooksCmd.Flags().Var(&listWebhooksJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listWebhooksJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: events - listWebhooksCmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) - listWebhooksCmd.Flags().StringVar(&listWebhooksReq.PageToken, "page-token", listWebhooksReq.PageToken, `Token indicating the page of artifact results to fetch.`) + cmd.Flags().StringVar(&listWebhooksReq.ModelName, "model-name", listWebhooksReq.ModelName, `If not specified, all webhooks associated with the specified events are listed, regardless of their associated model.`) + cmd.Flags().StringVar(&listWebhooksReq.PageToken, "page-token", listWebhooksReq.PageToken, `Token indicating the page of artifact results to fetch.`) -} - -var listWebhooksCmd = &cobra.Command{ - Use: "list-webhooks", - Short: `List registry webhooks.`, - Long: `List registry webhooks. 
+ cmd.Use = "list-webhooks" + cmd.Short = `List registry webhooks.` + cmd.Long = `List registry webhooks. **NOTE:** This endpoint is in Public Preview. - Lists all registry webhooks.`, + Lists all registry webhooks.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -983,42 +1422,64 @@ var listWebhooksCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listWebhooksOverrides { + fn(cmd, &listWebhooksReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListWebhooks()) + }) } // start reject-transition-request command -var rejectTransitionRequestReq ml.RejectTransitionRequest -var rejectTransitionRequestJson flags.JsonFlag -func init() { - Cmd.AddCommand(rejectTransitionRequestCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var rejectTransitionRequestOverrides []func( + *cobra.Command, + *ml.RejectTransitionRequest, +) + +func newRejectTransitionRequest() *cobra.Command { + cmd := &cobra.Command{} + + var rejectTransitionRequestReq ml.RejectTransitionRequest + var rejectTransitionRequestJson flags.JsonFlag + // TODO: short flags - rejectTransitionRequestCmd.Flags().Var(&rejectTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&rejectTransitionRequestJson, "json", `either inline JSON string or @path/to/file.json with request body`) - rejectTransitionRequestCmd.Flags().StringVar(&rejectTransitionRequestReq.Comment, "comment", rejectTransitionRequestReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&rejectTransitionRequestReq.Comment, "comment", rejectTransitionRequestReq.Comment, `User-provided comment on the action.`) -} - -var rejectTransitionRequestCmd = &cobra.Command{ - Use: "reject-transition-request NAME VERSION STAGE", - Short: `Reject a transition request.`, - Long: `Reject a transition request. + cmd.Use = "reject-transition-request NAME VERSION STAGE" + cmd.Short = `Reject a transition request.` + cmd.Long = `Reject a transition request. 
- Rejects a model version stage transition request.`, + Rejects a model version stage transition request.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1041,42 +1502,64 @@ var rejectTransitionRequestCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range rejectTransitionRequestOverrides { + fn(cmd, &rejectTransitionRequestReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRejectTransitionRequest()) + }) } // start rename-model command -var renameModelReq ml.RenameModelRequest -var renameModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(renameModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var renameModelOverrides []func( + *cobra.Command, + *ml.RenameModelRequest, +) + +func newRenameModel() *cobra.Command { + cmd := &cobra.Command{} + + var renameModelReq ml.RenameModelRequest + var renameModelJson flags.JsonFlag + // TODO: short flags - renameModelCmd.Flags().Var(&renameModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&renameModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - renameModelCmd.Flags().StringVar(&renameModelReq.NewName, "new-name", renameModelReq.NewName, `If provided, updates the name for this registered_model.`) + cmd.Flags().StringVar(&renameModelReq.NewName, "new-name", renameModelReq.NewName, `If provided, updates the name for this registered_model.`) -} - -var renameModelCmd = &cobra.Command{ - Use: "rename-model NAME", - Short: `Rename a model.`, - Long: `Rename a model. + cmd.Use = "rename-model NAME" + cmd.Short = `Rename a model.` + cmd.Long = `Rename a model. - Renames a registered model.`, + Renames a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1094,45 +1577,67 @@ var renameModelCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range renameModelOverrides { + fn(cmd, &renameModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRenameModel()) + }) } // start search-model-versions command -var searchModelVersionsReq ml.SearchModelVersionsRequest -var searchModelVersionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchModelVersionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var searchModelVersionsOverrides []func( + *cobra.Command, + *ml.SearchModelVersionsRequest, +) + +func newSearchModelVersions() *cobra.Command { + cmd := &cobra.Command{} + + var searchModelVersionsReq ml.SearchModelVersionsRequest + var searchModelVersionsJson flags.JsonFlag + // TODO: short flags - searchModelVersionsCmd.Flags().Var(&searchModelVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchModelVersionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchModelVersionsCmd.Flags().StringVar(&searchModelVersionsReq.Filter, "filter", searchModelVersionsReq.Filter, `String filter condition, like "name='my-model-name'".`) - searchModelVersionsCmd.Flags().IntVar(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().StringVar(&searchModelVersionsReq.Filter, "filter", searchModelVersionsReq.Filter, `String filter condition, like "name='my-model-name'".`) + cmd.Flags().IntVar(&searchModelVersionsReq.MaxResults, "max-results", searchModelVersionsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by - 
searchModelVersionsCmd.Flags().StringVar(&searchModelVersionsReq.PageToken, "page-token", searchModelVersionsReq.PageToken, `Pagination token to go to next page based on previous search query.`) + cmd.Flags().StringVar(&searchModelVersionsReq.PageToken, "page-token", searchModelVersionsReq.PageToken, `Pagination token to go to next page based on previous search query.`) -} - -var searchModelVersionsCmd = &cobra.Command{ - Use: "search-model-versions", - Short: `Searches model versions.`, - Long: `Searches model versions. + cmd.Use = "search-model-versions" + cmd.Short = `Searches model versions.` + cmd.Long = `Searches model versions. - Searches for specific model versions based on the supplied __filter__.`, + Searches for specific model versions based on the supplied __filter__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1149,45 +1654,67 @@ var searchModelVersionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range searchModelVersionsOverrides { + fn(cmd, &searchModelVersionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchModelVersions()) + }) } // start search-models command -var searchModelsReq ml.SearchModelsRequest -var searchModelsJson flags.JsonFlag -func init() { - Cmd.AddCommand(searchModelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var searchModelsOverrides []func( + *cobra.Command, + *ml.SearchModelsRequest, +) + +func newSearchModels() *cobra.Command { + cmd := &cobra.Command{} + + var searchModelsReq ml.SearchModelsRequest + var searchModelsJson flags.JsonFlag + // TODO: short flags - searchModelsCmd.Flags().Var(&searchModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&searchModelsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - searchModelsCmd.Flags().StringVar(&searchModelsReq.Filter, "filter", searchModelsReq.Filter, `String filter condition, like "name LIKE 'my-model-name'".`) - searchModelsCmd.Flags().IntVar(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) + cmd.Flags().StringVar(&searchModelsReq.Filter, "filter", searchModelsReq.Filter, `String filter condition, like "name LIKE 'my-model-name'".`) + cmd.Flags().IntVar(&searchModelsReq.MaxResults, "max-results", searchModelsReq.MaxResults, `Maximum number of models desired.`) // TODO: array: order_by - searchModelsCmd.Flags().StringVar(&searchModelsReq.PageToken, "page-token", searchModelsReq.PageToken, `Pagination token to go to the next page based on a previous search query.`) + cmd.Flags().StringVar(&searchModelsReq.PageToken, "page-token", searchModelsReq.PageToken, `Pagination token to go to the next page based on a previous 
search query.`) -} - -var searchModelsCmd = &cobra.Command{ - Use: "search-models", - Short: `Search models.`, - Long: `Search models. + cmd.Use = "search-models" + cmd.Short = `Search models.` + cmd.Long = `Search models. - Search for registered models based on the specified __filter__.`, + Search for registered models based on the specified __filter__.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1204,40 +1731,62 @@ var searchModelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range searchModelsOverrides { + fn(cmd, &searchModelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSearchModels()) + }) } // start set-model-tag command -var setModelTagReq ml.SetModelTagRequest -var setModelTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setModelTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setModelTagOverrides []func( + *cobra.Command, + *ml.SetModelTagRequest, +) + +func newSetModelTag() *cobra.Command { + cmd := &cobra.Command{} + + var setModelTagReq ml.SetModelTagRequest + var setModelTagJson flags.JsonFlag + // TODO: short flags - setModelTagCmd.Flags().Var(&setModelTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setModelTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setModelTagCmd = &cobra.Command{ - Use: "set-model-tag NAME KEY VALUE", - Short: `Set a tag.`, - Long: `Set a tag. + cmd.Use = "set-model-tag NAME KEY VALUE" + cmd.Short = `Set a tag.` + cmd.Long = `Set a tag. - Sets a tag on a registered model.`, + Sets a tag on a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1257,40 +1806,62 @@ var setModelTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setModelTagOverrides { + fn(cmd, &setModelTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetModelTag()) + }) } // start set-model-version-tag command -var setModelVersionTagReq ml.SetModelVersionTagRequest -var setModelVersionTagJson flags.JsonFlag -func init() { - Cmd.AddCommand(setModelVersionTagCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setModelVersionTagOverrides []func( + *cobra.Command, + *ml.SetModelVersionTagRequest, +) + +func newSetModelVersionTag() *cobra.Command { + cmd := &cobra.Command{} + + var setModelVersionTagReq ml.SetModelVersionTagRequest + var setModelVersionTagJson flags.JsonFlag + // TODO: short flags - setModelVersionTagCmd.Flags().Var(&setModelVersionTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setModelVersionTagJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setModelVersionTagCmd = &cobra.Command{ - Use: "set-model-version-tag NAME VERSION KEY VALUE", - Short: `Set a version tag.`, - Long: `Set a version tag. + cmd.Use = "set-model-version-tag NAME VERSION KEY VALUE" + cmd.Short = `Set a version tag.` + cmd.Long = `Set a version tag. 
- Sets a model version tag.`, + Sets a model version tag.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1311,44 +1882,66 @@ var setModelVersionTagCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setModelVersionTagOverrides { + fn(cmd, &setModelVersionTagReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetModelVersionTag()) + }) } // start test-registry-webhook command -var testRegistryWebhookReq ml.TestRegistryWebhookRequest -var testRegistryWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(testRegistryWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var testRegistryWebhookOverrides []func( + *cobra.Command, + *ml.TestRegistryWebhookRequest, +) + +func newTestRegistryWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var testRegistryWebhookReq ml.TestRegistryWebhookRequest + var testRegistryWebhookJson flags.JsonFlag + // TODO: short flags - testRegistryWebhookCmd.Flags().Var(&testRegistryWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&testRegistryWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - testRegistryWebhookCmd.Flags().Var(&testRegistryWebhookReq.Event, "event", `If event is specified, the test trigger uses the specified event.`) + cmd.Flags().Var(&testRegistryWebhookReq.Event, "event", `If event is specified, the test trigger uses the specified event.`) -} - -var testRegistryWebhookCmd = &cobra.Command{ - Use: "test-registry-webhook ID", - Short: `Test a webhook.`, - Long: `Test a webhook. + cmd.Use = "test-registry-webhook ID" + cmd.Short = `Test a webhook.` + cmd.Long = `Test a webhook. **NOTE:** This endpoint is in Public Preview. - Tests a registry webhook.`, + Tests a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1366,46 +1959,68 @@ var testRegistryWebhookCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range testRegistryWebhookOverrides { + fn(cmd, &testRegistryWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newTestRegistryWebhook()) + }) } // start transition-stage command -var transitionStageReq ml.TransitionModelVersionStageDatabricks -var transitionStageJson flags.JsonFlag -func init() { - Cmd.AddCommand(transitionStageCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var transitionStageOverrides []func( + *cobra.Command, + *ml.TransitionModelVersionStageDatabricks, +) + +func newTransitionStage() *cobra.Command { + cmd := &cobra.Command{} + + var transitionStageReq ml.TransitionModelVersionStageDatabricks + var transitionStageJson flags.JsonFlag + // TODO: short flags - transitionStageCmd.Flags().Var(&transitionStageJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&transitionStageJson, "json", `either inline JSON string or @path/to/file.json with request body`) - transitionStageCmd.Flags().StringVar(&transitionStageReq.Comment, "comment", transitionStageReq.Comment, `User-provided comment on the action.`) + cmd.Flags().StringVar(&transitionStageReq.Comment, "comment", transitionStageReq.Comment, `User-provided comment on the action.`) -} - -var transitionStageCmd = &cobra.Command{ - Use: "transition-stage NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS", - Short: `Transition a stage.`, - Long: `Transition a stage. + cmd.Use = "transition-stage NAME VERSION STAGE ARCHIVE_EXISTING_VERSIONS" + cmd.Short = `Transition a stage.` + cmd.Long = `Transition a stage. 
Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint] that also accepts a comment associated with the transition to be recorded.", - [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage`, + [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1432,40 +2047,62 @@ var transitionStageCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range transitionStageOverrides { + fn(cmd, &transitionStageReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newTransitionStage()) + }) } // start update-comment command -var updateCommentReq ml.UpdateComment -var updateCommentJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCommentCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateCommentOverrides []func( + *cobra.Command, + *ml.UpdateComment, +) + +func newUpdateComment() *cobra.Command { + cmd := &cobra.Command{} + + var updateCommentReq ml.UpdateComment + var updateCommentJson flags.JsonFlag + // TODO: short flags - updateCommentCmd.Flags().Var(&updateCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateCommentJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var updateCommentCmd = &cobra.Command{ - Use: "update-comment ID COMMENT", - Short: `Update a comment.`, - Long: `Update a comment. + cmd.Use = "update-comment ID COMMENT" + cmd.Short = `Update a comment.` + cmd.Long = `Update a comment. - Post an edit to a comment on a model version.`, + Post an edit to a comment on a model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1484,42 +2121,64 @@ var updateCommentCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateCommentOverrides { + fn(cmd, &updateCommentReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateComment()) + }) } // start update-model command -var updateModelReq ml.UpdateModelRequest -var updateModelJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateModelCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateModelOverrides []func( + *cobra.Command, + *ml.UpdateModelRequest, +) + +func newUpdateModel() *cobra.Command { + cmd := &cobra.Command{} + + var updateModelReq ml.UpdateModelRequest + var updateModelJson flags.JsonFlag + // TODO: short flags - updateModelCmd.Flags().Var(&updateModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateModelJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateModelCmd.Flags().StringVar(&updateModelReq.Description, "description", updateModelReq.Description, `If provided, updates the description for this registered_model.`) + cmd.Flags().StringVar(&updateModelReq.Description, "description", updateModelReq.Description, `If provided, updates the description for this registered_model.`) -} - -var updateModelCmd = &cobra.Command{ - Use: "update-model NAME", - Short: `Update model.`, - Long: `Update model. + cmd.Use = "update-model NAME" + cmd.Short = `Update model.` + cmd.Long = `Update model. 
- Updates a registered model.`, + Updates a registered model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1537,42 +2196,64 @@ var updateModelCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateModelOverrides { + fn(cmd, &updateModelReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateModel()) + }) } // start update-model-version command -var updateModelVersionReq ml.UpdateModelVersionRequest -var updateModelVersionJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateModelVersionCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateModelVersionOverrides []func( + *cobra.Command, + *ml.UpdateModelVersionRequest, +) + +func newUpdateModelVersion() *cobra.Command { + cmd := &cobra.Command{} + + var updateModelVersionReq ml.UpdateModelVersionRequest + var updateModelVersionJson flags.JsonFlag + // TODO: short flags - updateModelVersionCmd.Flags().Var(&updateModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateModelVersionJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateModelVersionCmd.Flags().StringVar(&updateModelVersionReq.Description, "description", updateModelVersionReq.Description, `If provided, updates the description for this registered_model.`) + cmd.Flags().StringVar(&updateModelVersionReq.Description, "description", updateModelVersionReq.Description, `If provided, updates the description for this registered_model.`) -} - -var updateModelVersionCmd = &cobra.Command{ - Use: "update-model-version NAME VERSION", - Short: `Update model version.`, - Long: `Update model version. + cmd.Use = "update-model-version NAME VERSION" + cmd.Short = `Update model version.` + cmd.Long = `Update model version. 
- Updates the model version.`, + Updates the model version.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1591,48 +2272,70 @@ var updateModelVersionCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateModelVersionOverrides { + fn(cmd, &updateModelVersionReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateModelVersion()) + }) } // start update-webhook command -var updateWebhookReq ml.UpdateRegistryWebhook -var updateWebhookJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateWebhookCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateWebhookOverrides []func( + *cobra.Command, + *ml.UpdateRegistryWebhook, +) + +func newUpdateWebhook() *cobra.Command { + cmd := &cobra.Command{} + + var updateWebhookReq ml.UpdateRegistryWebhook + var updateWebhookJson flags.JsonFlag + // TODO: short flags - updateWebhookCmd.Flags().Var(&updateWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateWebhookJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateWebhookCmd.Flags().StringVar(&updateWebhookReq.Description, "description", updateWebhookReq.Description, `User-specified description for the webhook.`) + cmd.Flags().StringVar(&updateWebhookReq.Description, "description", updateWebhookReq.Description, `User-specified description for the webhook.`) // TODO: array: events // TODO: complex arg: http_url_spec // TODO: complex arg: job_spec - updateWebhookCmd.Flags().Var(&updateWebhookReq.Status, "status", `This describes an enum.`) + cmd.Flags().Var(&updateWebhookReq.Status, "status", `This describes an enum.`) -} - -var updateWebhookCmd = &cobra.Command{ - Use: "update-webhook ID", - Short: `Update a webhook.`, - Long: `Update a webhook. + cmd.Use = "update-webhook ID" + cmd.Short = `Update a webhook.` + cmd.Long = `Update a webhook. **NOTE:** This endpoint is in Public Preview. 
- Updates a registry webhook.`, + Updates a registry webhook.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -1650,10 +2353,24 @@ var updateWebhookCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateWebhookOverrides { + fn(cmd, &updateWebhookReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateWebhook()) + }) } // end service ModelRegistry diff --git a/cmd/workspace/permissions/permissions.go b/cmd/workspace/permissions/permissions.go index c92501207..39454b248 100755 --- a/cmd/workspace/permissions/permissions.go +++ b/cmd/workspace/permissions/permissions.go @@ -10,40 +10,62 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "permissions", - Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, - Long: `Permissions API are used to create read, write, edit, update and manage access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "permissions", + Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, + Long: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq iam.GetPermissionRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetPermissionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetPermissionRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Get object permissions.`, - Long: `Get object permissions. + cmd.Use = "get REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Get object permissions.` + cmd.Long = `Get object permissions. Gets the permission of an object. 
Objects can inherit permissions from their - parent objects or root objects.`, + parent objects or root objects.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -55,35 +77,57 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-permission-levels command -var getPermissionLevelsReq iam.GetPermissionLevelsRequest -func init() { - Cmd.AddCommand(getPermissionLevelsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *iam.GetPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq iam.GetPermissionLevelsRequest + // TODO: short flags -} - -var getPermissionLevelsCmd = &cobra.Command{ - Use: "get-permission-levels REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Get permission levels.`, - Long: `Get permission levels. 
+ cmd.Use = "get-permission-levels REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Get permission levels.` + cmd.Long = `Get permission levels. - Gets the permission levels that a user can have on an object.`, + Gets the permission levels that a user can have on an object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -95,40 +139,62 @@ var getPermissionLevelsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) } // start set command -var setReq iam.PermissionsRequest -var setJson flags.JsonFlag -func init() { - Cmd.AddCommand(setCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setOverrides []func( + *cobra.Command, + *iam.PermissionsRequest, +) + +func newSet() *cobra.Command { + cmd := &cobra.Command{} + + var setReq iam.PermissionsRequest + var setJson flags.JsonFlag + // TODO: short flags - setCmd.Flags().Var(&setJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list -} - -var setCmd = &cobra.Command{ - Use: "set REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Set permissions.`, - Long: `Set permissions. + cmd.Use = "set REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Set permissions.` + cmd.Long = `Set permissions. Sets permissions on object. Objects can inherit permissions from their parent - objects and root objects.`, + objects and root objects.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -146,39 +212,61 @@ var setCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setOverrides { + fn(cmd, &setReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSet()) + }) } // start update command -var updateReq iam.PermissionsRequest -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *iam.PermissionsRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.PermissionsRequest + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: access_control_list -} - -var updateCmd = &cobra.Command{ - Use: "update REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID", - Short: `Update permission.`, - Long: `Update permission. + cmd.Use = "update REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" + cmd.Short = `Update permission.` + cmd.Long = `Update permission. 
- Updates the permissions on an object.`, + Updates the permissions on an object.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -196,10 +284,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Permissions diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 10f37846d..652af8987 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "pipelines", - Short: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.`, - Long: `The Delta Live Tables API allows you to create, edit, delete, start, and view +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "pipelines", + Short: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.`, + Long: `The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines. Delta Live Tables is a framework for building reliable, maintainable, and @@ -30,40 +35,57 @@ var Cmd = &cobra.Command{ quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations.`, - Annotations: map[string]string{ - "package": "pipelines", - }, + GroupID: "pipelines", + Annotations: map[string]string{ + "package": "pipelines", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq pipelines.CreatePipeline -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *pipelines.CreatePipeline, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq pipelines.CreatePipeline + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a pipeline.`, - Long: `Create a pipeline. + cmd.Use = "create" + cmd.Short = `Create a pipeline.` + cmd.Long = `Create a pipeline. Creates a new data processing pipeline based on the requested configuration. 
- If successful, this method returns the ID of the new pipeline.`, + If successful, this method returns the ID of the new pipeline.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,51 +103,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq pipelines.DeletePipelineRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *pipelines.DeletePipelineRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq pipelines.DeletePipelineRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete PIPELINE_ID", - Short: `Delete a pipeline.`, - Long: `Delete a pipeline. 
+ cmd.Use = "delete PIPELINE_ID" + cmd.Short = `Delete a pipeline.` + cmd.Long = `Delete a pipeline. - Deletes a pipeline.`, + Deletes a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.PipelineId = args[0] err = w.Pipelines.Delete(ctx, deleteReq) @@ -133,55 +164,63 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq pipelines.GetPipelineRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *pipelines.GetPipelineRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq pipelines.GetPipelineRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} + cmd.Use = "get PIPELINE_ID" + cmd.Short = `Get a pipeline.` + cmd.Long = `Get a pipeline.` -var getCmd = &cobra.Command{ - Use: "get PIPELINE_ID", - Short: `Get a pipeline.`, - Long: `Get a pipeline.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. 
Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.PipelineId = args[0] response, err := w.Pipelines.Get(ctx, getReq) @@ -189,35 +228,57 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-update command -var getUpdateReq pipelines.GetUpdateRequest -func init() { - Cmd.AddCommand(getUpdateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getUpdateOverrides []func( + *cobra.Command, + *pipelines.GetUpdateRequest, +) + +func newGetUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var getUpdateReq pipelines.GetUpdateRequest + // TODO: short flags -} - -var getUpdateCmd = &cobra.Command{ - Use: "get-update PIPELINE_ID UPDATE_ID", - Short: `Get a pipeline update.`, - Long: `Get a pipeline update. + cmd.Use = "get-update PIPELINE_ID UPDATE_ID" + cmd.Short = `Get a pipeline update.` + cmd.Long = `Get a pipeline update. 
- Gets an update from an active pipeline.`, + Gets an update from an active pipeline.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -229,38 +290,64 @@ var getUpdateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getUpdateOverrides { + fn(cmd, &getUpdateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetUpdate()) + }) } // start list-pipeline-events command -var listPipelineEventsReq pipelines.ListPipelineEventsRequest -var listPipelineEventsJson flags.JsonFlag -func init() { - Cmd.AddCommand(listPipelineEventsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listPipelineEventsOverrides []func( + *cobra.Command, + *pipelines.ListPipelineEventsRequest, +) + +func newListPipelineEvents() *cobra.Command { + cmd := &cobra.Command{} + + var listPipelineEventsReq pipelines.ListPipelineEventsRequest + var listPipelineEventsJson flags.JsonFlag + // TODO: short flags - listPipelineEventsCmd.Flags().Var(&listPipelineEventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listPipelineEventsJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listPipelineEventsCmd.Flags().StringVar(&listPipelineEventsReq.Filter, "filter", listPipelineEventsReq.Filter, `Criteria to select a subset of results, expressed using a SQL-like syntax.`) - listPipelineEventsCmd.Flags().IntVar(&listPipelineEventsReq.MaxResults, "max-results", listPipelineEventsReq.MaxResults, `Max number of entries to return in a single page.`) + cmd.Flags().StringVar(&listPipelineEventsReq.Filter, "filter", listPipelineEventsReq.Filter, `Criteria to select a subset of results, expressed using a SQL-like syntax.`) + cmd.Flags().IntVar(&listPipelineEventsReq.MaxResults, "max-results", listPipelineEventsReq.MaxResults, `Max number of entries to return in a single page.`) // TODO: array: order_by - listPipelineEventsCmd.Flags().StringVar(&listPipelineEventsReq.PageToken, "page-token", listPipelineEventsReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listPipelineEventsReq.PageToken, "page-token", listPipelineEventsReq.PageToken, `Page token returned by previous call.`) -} - -var listPipelineEventsCmd = &cobra.Command{ - Use: "list-pipeline-events PIPELINE_ID", - Short: `List pipeline events.`, - Long: `List pipeline events. + cmd.Use = "list-pipeline-events PIPELINE_ID" + cmd.Short = `List pipeline events.` + cmd.Long = `List pipeline events. 
- Retrieves events for a pipeline.`, + Retrieves events for a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -270,23 +357,6 @@ var listPipelineEventsCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } listPipelineEventsReq.PipelineId = args[0] response, err := w.Pipelines.ListPipelineEventsAll(ctx, listPipelineEventsReq) @@ -294,45 +364,67 @@ var listPipelineEventsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listPipelineEventsOverrides { + fn(cmd, &listPipelineEventsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListPipelineEvents()) + }) } // start list-pipelines command -var listPipelinesReq pipelines.ListPipelinesRequest -var listPipelinesJson flags.JsonFlag -func init() { - Cmd.AddCommand(listPipelinesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listPipelinesOverrides []func( + *cobra.Command, + *pipelines.ListPipelinesRequest, +) + +func newListPipelines() *cobra.Command { + cmd := &cobra.Command{} + + var listPipelinesReq pipelines.ListPipelinesRequest + var listPipelinesJson flags.JsonFlag + // TODO: short flags - listPipelinesCmd.Flags().Var(&listPipelinesJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listPipelinesJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listPipelinesCmd.Flags().StringVar(&listPipelinesReq.Filter, "filter", listPipelinesReq.Filter, `Select a subset of results based on the specified criteria.`) - listPipelinesCmd.Flags().IntVar(&listPipelinesReq.MaxResults, "max-results", listPipelinesReq.MaxResults, `The maximum number of entries to return in a single page.`) + cmd.Flags().StringVar(&listPipelinesReq.Filter, "filter", listPipelinesReq.Filter, `Select a subset of results based on the specified criteria.`) + cmd.Flags().IntVar(&listPipelinesReq.MaxResults, "max-results", listPipelinesReq.MaxResults, `The maximum number of entries to return in a single page.`) // TODO: array: order_by - listPipelinesCmd.Flags().StringVar(&listPipelinesReq.PageToken, "page-token", listPipelinesReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listPipelinesReq.PageToken, "page-token", listPipelinesReq.PageToken, 
`Page token returned by previous call.`) -} - -var listPipelinesCmd = &cobra.Command{ - Use: "list-pipelines", - Short: `List pipelines.`, - Long: `List pipelines. + cmd.Use = "list-pipelines" + cmd.Short = `List pipelines.` + cmd.Long = `List pipelines. - Lists pipelines defined in the Delta Live Tables system.`, + Lists pipelines defined in the Delta Live Tables system.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -349,55 +441,64 @@ var listPipelinesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listPipelinesOverrides { + fn(cmd, &listPipelinesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListPipelines()) + }) } // start list-updates command -var listUpdatesReq pipelines.ListUpdatesRequest -func init() { - Cmd.AddCommand(listUpdatesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listUpdatesOverrides []func( + *cobra.Command, + *pipelines.ListUpdatesRequest, +) + +func newListUpdates() *cobra.Command { + cmd := &cobra.Command{} + + var listUpdatesReq pipelines.ListUpdatesRequest + // TODO: short flags - listUpdatesCmd.Flags().IntVar(&listUpdatesReq.MaxResults, "max-results", listUpdatesReq.MaxResults, `Max number of entries to return in a single page.`) - listUpdatesCmd.Flags().StringVar(&listUpdatesReq.PageToken, "page-token", listUpdatesReq.PageToken, `Page token returned by previous call.`) - listUpdatesCmd.Flags().StringVar(&listUpdatesReq.UntilUpdateId, "until-update-id", listUpdatesReq.UntilUpdateId, `If present, returns updates until and including this update_id.`) + cmd.Flags().IntVar(&listUpdatesReq.MaxResults, "max-results", listUpdatesReq.MaxResults, `Max number of entries to return in a single page.`) + cmd.Flags().StringVar(&listUpdatesReq.PageToken, "page-token", listUpdatesReq.PageToken, `Page token returned by previous call.`) + cmd.Flags().StringVar(&listUpdatesReq.UntilUpdateId, "until-update-id", listUpdatesReq.UntilUpdateId, `If present, returns updates until and including this update_id.`) -} - -var listUpdatesCmd = &cobra.Command{ - Use: "list-updates PIPELINE_ID", - Short: `List pipeline updates.`, - Long: `List pipeline updates. + cmd.Use = "list-updates PIPELINE_ID" + cmd.Short = `List pipeline updates.` + cmd.Long = `List pipeline updates. 
- List updates for an active pipeline.`, + List updates for an active pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The pipeline to return updates for") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the pipeline to return updates for") - } listUpdatesReq.PipelineId = args[0] response, err := w.Pipelines.ListUpdates(ctx, listUpdatesReq) @@ -405,57 +506,65 @@ var listUpdatesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listUpdatesOverrides { + fn(cmd, &listUpdatesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListUpdates()) + }) } // start reset command -var resetReq pipelines.ResetRequest -var resetSkipWait bool -var resetTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var resetOverrides []func( + *cobra.Command, + *pipelines.ResetRequest, +) -func init() { - Cmd.AddCommand(resetCmd) +func newReset() *cobra.Command { + cmd := &cobra.Command{} - resetCmd.Flags().BoolVar(&resetSkipWait, "no-wait", resetSkipWait, `do not wait to reach RUNNING state`) - resetCmd.Flags().DurationVar(&resetTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var resetReq pipelines.ResetRequest + + var resetSkipWait bool + var resetTimeout time.Duration + + cmd.Flags().BoolVar(&resetSkipWait, "no-wait", resetSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&resetTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var resetCmd = &cobra.Command{ - Use: "reset PIPELINE_ID", - Short: `Reset a pipeline.`, - Long: `Reset a pipeline. + cmd.Use = "reset PIPELINE_ID" + cmd.Short = `Reset a pipeline.` + cmd.Long = `Reset a pipeline. 
- Resets a pipeline.`, + Resets a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } resetReq.PipelineId = args[0] wait, err := w.Pipelines.Reset(ctx, resetReq) @@ -475,38 +584,64 @@ var resetCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range resetOverrides { + fn(cmd, &resetReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReset()) + }) } // start start-update command -var startUpdateReq pipelines.StartUpdate -var startUpdateJson flags.JsonFlag -func init() { - Cmd.AddCommand(startUpdateCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var startUpdateOverrides []func( + *cobra.Command, + *pipelines.StartUpdate, +) + +func newStartUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var startUpdateReq pipelines.StartUpdate + var startUpdateJson flags.JsonFlag + // TODO: short flags - startUpdateCmd.Flags().Var(&startUpdateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&startUpdateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - startUpdateCmd.Flags().Var(&startUpdateReq.Cause, "cause", ``) - startUpdateCmd.Flags().BoolVar(&startUpdateReq.FullRefresh, "full-refresh", startUpdateReq.FullRefresh, `If true, this update will reset all tables before running.`) + cmd.Flags().Var(&startUpdateReq.Cause, "cause", ``) + cmd.Flags().BoolVar(&startUpdateReq.FullRefresh, "full-refresh", startUpdateReq.FullRefresh, `If true, this update will reset all tables before running.`) // TODO: array: full_refresh_selection // TODO: array: refresh_selection -} - -var startUpdateCmd = &cobra.Command{ - Use: "start-update PIPELINE_ID", - Short: `Queue a pipeline update.`, - Long: `Queue a pipeline update. + cmd.Use = "start-update PIPELINE_ID" + cmd.Short = `Queue a pipeline update.` + cmd.Long = `Queue a pipeline update. 
- Starts or queues a pipeline update.`, + Starts or queues a pipeline update.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -516,23 +651,6 @@ var startUpdateCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } startUpdateReq.PipelineId = args[0] response, err := w.Pipelines.StartUpdate(ctx, startUpdateReq) @@ -540,57 +658,65 @@ var startUpdateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range startUpdateOverrides { + fn(cmd, &startUpdateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStartUpdate()) + }) } // start stop command -var stopReq pipelines.StopRequest -var stopSkipWait bool -var stopTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var stopOverrides []func( + *cobra.Command, + *pipelines.StopRequest, +) -func init() { - Cmd.AddCommand(stopCmd) +func newStop() *cobra.Command { + cmd := &cobra.Command{} - stopCmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach IDLE state`) - stopCmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach IDLE state`) + var stopReq pipelines.StopRequest + + var stopSkipWait bool + var stopTimeout time.Duration + + cmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach IDLE state`) + cmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach IDLE state`) // TODO: short flags -} - -var stopCmd = &cobra.Command{ - Use: "stop PIPELINE_ID", - Short: `Stop a pipeline.`, - Long: `Stop a pipeline. + cmd.Use = "stop PIPELINE_ID" + cmd.Short = `Stop a pipeline.` + cmd.Long = `Stop a pipeline. 
- Stops a pipeline.`, + Stops a pipeline.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } stopReq.PipelineId = args[0] wait, err := w.Pipelines.Stop(ctx, stopReq) @@ -610,53 +736,82 @@ var stopCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range stopOverrides { + fn(cmd, &stopReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStop()) + }) } // start update command -var updateReq pipelines.EditPipeline -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *pipelines.EditPipeline, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq pipelines.EditPipeline + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.AllowDuplicateNames, "allow-duplicate-names", updateReq.AllowDuplicateNames, `If false, deployment will fail if name has changed and conflicts the name of another pipeline.`) - updateCmd.Flags().StringVar(&updateReq.Catalog, "catalog", updateReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) - updateCmd.Flags().StringVar(&updateReq.Channel, "channel", updateReq.Channel, `DLT Release Channel that specifies which version to use.`) + cmd.Flags().BoolVar(&updateReq.AllowDuplicateNames, "allow-duplicate-names", updateReq.AllowDuplicateNames, `If false, deployment will fail if name has changed and conflicts the name of another pipeline.`) + cmd.Flags().StringVar(&updateReq.Catalog, "catalog", updateReq.Catalog, `A catalog in Unity Catalog to publish data from this pipeline to.`) + cmd.Flags().StringVar(&updateReq.Channel, "channel", updateReq.Channel, `DLT Release Channel that specifies which version to use.`) // TODO: array: clusters // TODO: map via StringToStringVar: configuration - updateCmd.Flags().BoolVar(&updateReq.Continuous, "continuous", updateReq.Continuous, `Whether the pipeline is continuous or triggered.`) - updateCmd.Flags().BoolVar(&updateReq.Development, "development", updateReq.Development, `Whether the pipeline is in Development mode.`) - updateCmd.Flags().StringVar(&updateReq.Edition, "edition", updateReq.Edition, `Pipeline product 
edition.`) - updateCmd.Flags().Int64Var(&updateReq.ExpectedLastModified, "expected-last-modified", updateReq.ExpectedLastModified, `If present, the last-modified time of the pipeline settings before the edit.`) + cmd.Flags().BoolVar(&updateReq.Continuous, "continuous", updateReq.Continuous, `Whether the pipeline is continuous or triggered.`) + cmd.Flags().BoolVar(&updateReq.Development, "development", updateReq.Development, `Whether the pipeline is in Development mode.`) + cmd.Flags().StringVar(&updateReq.Edition, "edition", updateReq.Edition, `Pipeline product edition.`) + cmd.Flags().Int64Var(&updateReq.ExpectedLastModified, "expected-last-modified", updateReq.ExpectedLastModified, `If present, the last-modified time of the pipeline settings before the edit.`) // TODO: complex arg: filters - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Unique identifier for this pipeline.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Unique identifier for this pipeline.`) // TODO: array: libraries - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Friendly identifier for this pipeline.`) - updateCmd.Flags().BoolVar(&updateReq.Photon, "photon", updateReq.Photon, `Whether Photon is enabled for this pipeline.`) - updateCmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`) - updateCmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) - updateCmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing checkpoints and tables.`) - updateCmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Friendly identifier for this pipeline.`) + cmd.Flags().BoolVar(&updateReq.Photon, "photon", 
updateReq.Photon, `Whether Photon is enabled for this pipeline.`) + cmd.Flags().StringVar(&updateReq.PipelineId, "pipeline-id", updateReq.PipelineId, `Unique identifier for this pipeline.`) + cmd.Flags().BoolVar(&updateReq.Serverless, "serverless", updateReq.Serverless, `Whether serverless compute is enabled for this pipeline.`) + cmd.Flags().StringVar(&updateReq.Storage, "storage", updateReq.Storage, `DBFS root directory for storing checkpoints and tables.`) + cmd.Flags().StringVar(&updateReq.Target, "target", updateReq.Target, `Target schema (database) to add tables in this pipeline to.`) // TODO: complex arg: trigger -} - -var updateCmd = &cobra.Command{ - Use: "update PIPELINE_ID", - Short: `Edit a pipeline.`, - Long: `Edit a pipeline. + cmd.Use = "update PIPELINE_ID" + cmd.Short = `Edit a pipeline.` + cmd.Long = `Edit a pipeline. - Updates a pipeline with the supplied configuration.`, + Updates a pipeline with the supplied configuration.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -666,23 +821,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." - names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique identifier for this pipeline") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique identifier for this pipeline") - } updateReq.PipelineId = args[0] } @@ -691,10 +829,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Pipelines diff --git a/cmd/workspace/policy-families/policy-families.go b/cmd/workspace/policy-families/policy-families.go index 8954afa1d..532317f7f 100755 --- a/cmd/workspace/policy-families/policy-families.go +++ b/cmd/workspace/policy-families/policy-families.go @@ -10,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "policy-families", - Short: `View available policy families.`, - Long: `View available policy families. A policy family contains a policy definition +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "policy-families", + Short: `View available policy families.`, + Long: `View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. 
Databricks manages and provides policy families for several common cluster use @@ -22,34 +27,51 @@ var Cmd = &cobra.Command{ Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a policy family. Cluster policies created using a policy family inherit the policy family's policy definition.`, - Annotations: map[string]string{ - "package": "compute", - }, + GroupID: "compute", + Annotations: map[string]string{ + "package": "compute", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq compute.GetPolicyFamilyRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *compute.GetPolicyFamilyRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq compute.GetPolicyFamilyRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get POLICY_FAMILY_ID", - Short: `Get policy family information.`, - Long: `Get policy family information. + cmd.Use = "get POLICY_FAMILY_ID" + cmd.Short = `Get policy family information.` + cmd.Long = `Get policy family information. 
- Retrieve the information for an policy family based on its identifier.`, + Retrieve the information for an policy family based on its identifier.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -60,43 +82,65 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq compute.ListPolicyFamiliesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *compute.ListPolicyFamiliesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq compute.ListPolicyFamiliesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `The max number of policy families to return.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) + cmd.Flags().Int64Var(&listReq.MaxResults, "max-results", listReq.MaxResults, `The max number of policy families to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List policy families.`, - Long: `List policy families. + cmd.Use = "list" + cmd.Short = `List policy families.` + cmd.Long = `List policy families. - Retrieve a list of policy families. This API is paginated.`, + Retrieve a list of policy families. 
This API is paginated.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -113,10 +157,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service PolicyFamilies diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index 58ed33954..e5a41e128 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -12,47 +12,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "providers", - Short: `Databricks Providers REST API.`, - Long: `Databricks Providers REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "providers", + Short: `Databricks Providers REST API.`, + Long: `Databricks Providers REST API`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateProvider -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sharing.CreateProvider, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateProvider + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`) - createCmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the provider.`) + cmd.Flags().StringVar(&createReq.RecipientProfileStr, "recipient-profile-str", createReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME AUTHENTICATION_TYPE", - Short: `Create an auth provider.`, - Long: `Create an auth provider. 
+ cmd.Use = "create NAME AUTHENTICATION_TYPE" + cmd.Short = `Create an auth provider.` + cmd.Long = `Create an auth provider. Creates a new authentication provider minimally based on a name and - authentication type. The caller must be an admin on the metastore.`, + authentication type. The caller must be an admin on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,52 +96,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteProviderRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteProviderRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteProviderRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a provider.`, - Long: `Delete a provider. + cmd.Use = "delete NAME" + cmd.Short = `Delete a provider.` + cmd.Long = `Delete a provider. Deletes an authentication provider, if the caller is a metastore admin or is - the owner of the provider.`, + the owner of the provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." - names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the provider") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the provider") - } deleteReq.Name = args[0] err = w.Providers.Delete(ctx, deleteReq) @@ -127,53 +158,62 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetProviderRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetProviderRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetProviderRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a provider.`, - Long: `Get a provider. + cmd.Use = "get NAME" + cmd.Short = `Get a provider.` + cmd.Long = `Get a provider. Gets a specific authentication provider. The caller must supply the name of the provider, and must either be a metastore admin or the owner of the - provider.`, + provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." - names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Providers drop-down. 
Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the provider") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the provider") - } getReq.Name = args[0] response, err := w.Providers.Get(ctx, getReq) @@ -181,45 +221,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sharing.ListProvidersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *sharing.ListProvidersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sharing.ListProvidersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.DataProviderGlobalMetastoreId, "data-provider-global-metastore-id", listReq.DataProviderGlobalMetastoreId, `If not provided, all providers will be returned.`) + cmd.Flags().StringVar(&listReq.DataProviderGlobalMetastoreId, "data-provider-global-metastore-id", listReq.DataProviderGlobalMetastoreId, `If not provided, all providers will be returned.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List providers.`, - Long: `List providers. + cmd.Use = "list" + cmd.Short = `List providers.` + cmd.Long = `List providers. Gets an array of available authentication providers. The caller must either be a metastore admin or the owner of the providers. Providers not owned by the caller are not included in the response. 
There is no guarantee of a specific - ordering of the elements in the array.`, + ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -236,53 +298,62 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-shares command -var listSharesReq sharing.ListSharesRequest -func init() { - Cmd.AddCommand(listSharesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSharesOverrides []func( + *cobra.Command, + *sharing.ListSharesRequest, +) + +func newListShares() *cobra.Command { + cmd := &cobra.Command{} + + var listSharesReq sharing.ListSharesRequest + // TODO: short flags -} - -var listSharesCmd = &cobra.Command{ - Use: "list-shares NAME", - Short: `List shares by Provider.`, - Long: `List shares by Provider. 
+ cmd.Use = "list-shares NAME" + cmd.Short = `List shares by Provider.` + cmd.Long = `List shares by Provider. Gets an array of a specified provider's shares within the metastore where: - * the caller is a metastore admin, or * the caller is the owner.`, + * the caller is a metastore admin, or * the caller is the owner.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." - names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the provider in which to list shares") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the provider in which to list shares") - } listSharesReq.Name = args[0] response, err := w.Providers.ListSharesAll(ctx, listSharesReq) @@ -290,41 +361,70 @@ var listSharesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listSharesOverrides { + fn(cmd, &listSharesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListShares()) + }) } // start update command -var updateReq sharing.UpdateProvider -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sharing.UpdateProvider, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateProvider + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the Provider.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`) - updateCmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the provider.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the Provider.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of Provider owner.`) + cmd.Flags().StringVar(&updateReq.RecipientProfileStr, "recipient-profile-str", updateReq.RecipientProfileStr, `This field is required when the __authentication_type__ is **TOKEN** or not provided.`) 
-} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a provider.`, - Long: `Update a provider. + cmd.Use = "update NAME" + cmd.Short = `Update a provider.` + cmd.Long = `Update a provider. Updates the information for an authentication provider, if the caller is a metastore admin or is the owner of the provider. If the update changes the provider name, the caller must be both a metastore admin and the owner of the - provider.`, + provider.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -334,23 +434,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." - names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The name of the Provider") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the name of the provider") - } updateReq.Name = args[0] } @@ -359,10 +442,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Providers diff --git a/cmd/workspace/queries/overrides.go b/cmd/workspace/queries/overrides.go index 86f47388e..a06dabdeb 100644 --- a/cmd/workspace/queries/overrides.go +++ b/cmd/workspace/queries/overrides.go @@ -1,11 +1,19 @@ package queries -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListQueriesRequest) { // TODO: figure out colored/non-colored headers and colspan shifts listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Author"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{.User.Email|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/queries/queries.go b/cmd/workspace/queries/queries.go index 8cf352783..b1c94ddcd 100755 --- a/cmd/workspace/queries/queries.go +++ b/cmd/workspace/queries/queries.go @@ -12,33 +12,53 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "queries", - Short: `These endpoints are used for CRUD operations on query definitions.`, - Long: `These endpoints are used for CRUD operations on query definitions. Query +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "queries", + Short: `These endpoints are used for CRUD operations on query definitions.`, + Long: `These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the sql_task type of the Jobs API, e.g. :method:jobs/create.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.QueryPostContent -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.QueryPostContent, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.QueryPostContent + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new query definition.`, - Long: `Create a new query definition. + cmd.Use = "create" + cmd.Short = `Create a new query definition.` + cmd.Long = `Create a new query definition. Creates a new query definition. Queries created with this endpoint belong to the authenticated user making the request. @@ -48,18 +68,20 @@ var createCmd = &cobra.Command{ available SQL warehouses. Or you can copy the data_source_id from an existing query. 
- **Note**: You cannot add a visualization until you create the query.`, + **Note**: You cannot add a visualization until you create the query.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -77,53 +99,62 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteQueryRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteQueryRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteQueryRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete QUERY_ID", - Short: `Delete a query.`, - Long: `Delete a query. + cmd.Use = "delete QUERY_ID" + cmd.Short = `Delete a query.` + cmd.Long = `Delete a query. 
Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is - deleted after 30 days.`, + deleted after 30 days.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." - names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } deleteReq.QueryId = args[0] err = w.Queries.Delete(ctx, deleteReq) @@ -131,52 +162,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sql.GetQueryRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetQueryRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sql.GetQueryRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get QUERY_ID", - Short: `Get a query definition.`, - Long: `Get a query definition. + cmd.Use = "get QUERY_ID" + cmd.Short = `Get a query definition.` + cmd.Long = `Get a query definition. Retrieve a query object definition along with contextual permissions - information about the currently authenticated user.`, + information about the currently authenticated user.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." - names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } getReq.QueryId = args[0] response, err := w.Queries.Get(ctx, getReq) @@ -184,46 +224,68 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sql.ListQueriesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *sql.ListQueriesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListQueriesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`) - listCmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) - listCmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`) - listCmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) + cmd.Flags().StringVar(&listReq.Order, "order", listReq.Order, `Name of query attribute to order by.`) + cmd.Flags().IntVar(&listReq.Page, "page", listReq.Page, `Page number to retrieve.`) + cmd.Flags().IntVar(&listReq.PageSize, "page-size", listReq.PageSize, `Number of queries to return per page.`) + cmd.Flags().StringVar(&listReq.Q, "q", listReq.Q, `Full text search term.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get a list of queries.`, - Long: `Get a list of queries. + cmd.Use = "list" + cmd.Short = `Get a list of queries.` + cmd.Long = `Get a list of queries. Gets a list of queries. 
Optionally, this list can be filtered by a search - term.`, + term.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -240,52 +302,61 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start restore command -var restoreReq sql.RestoreQueryRequest -func init() { - Cmd.AddCommand(restoreCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreOverrides []func( + *cobra.Command, + *sql.RestoreQueryRequest, +) + +func newRestore() *cobra.Command { + cmd := &cobra.Command{} + + var restoreReq sql.RestoreQueryRequest + // TODO: short flags -} - -var restoreCmd = &cobra.Command{ - Use: "restore QUERY_ID", - Short: `Restore a query.`, - Long: `Restore a query. + cmd.Use = "restore QUERY_ID" + cmd.Short = `Restore a query.` + cmd.Long = `Restore a query. Restore a query that has been moved to the trash. 
A restored query appears in - list views and searches. You can use restored queries for alerts.`, + list views and searches. You can use restored queries for alerts.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." - names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } restoreReq.QueryId = args[0] err = w.Queries.Restore(ctx, restoreReq) @@ -293,41 +364,67 @@ var restoreCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range restoreOverrides { + fn(cmd, &restoreReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestore()) + }) } // start update command -var updateReq sql.QueryEditContent -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sql.QueryEditContent, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.QueryEditContent + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID.`) - updateCmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`) + cmd.Flags().StringVar(&updateReq.DataSourceId, "data-source-id", updateReq.DataSourceId, `Data source ID.`) + cmd.Flags().StringVar(&updateReq.Description, "description", updateReq.Description, `General description that conveys additional information about this query such as usage notes.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The title of this query that appears in list views, widget headings, and on the query page.`) // TODO: any: options - updateCmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query 
to be run.`) + cmd.Flags().StringVar(&updateReq.Query, "query", updateReq.Query, `The text of the query to be run.`) -} - -var updateCmd = &cobra.Command{ - Use: "update QUERY_ID", - Short: `Change a query definition.`, - Long: `Change a query definition. + cmd.Use = "update QUERY_ID" + cmd.Short = `Change a query definition.` + cmd.Long = `Change a query definition. Modify this query definition. - **Note**: You cannot undo this operation.`, + **Note**: You cannot undo this operation.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -337,23 +434,6 @@ var updateCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." - names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have ") - } updateReq.QueryId = args[0] response, err := w.Queries.Update(ctx, updateReq) @@ -361,10 +441,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Queries diff --git a/cmd/workspace/query-history/overrides.go b/cmd/workspace/query-history/overrides.go index 7e7020697..e0d79423c 100644 --- a/cmd/workspace/query-history/overrides.go +++ b/cmd/workspace/query-history/overrides.go @@ -1,10 +1,18 @@ package query_history -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListQueryHistoryRequest) { // TODO: figure out the right format listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.UserName}} {{cyan "%s" .Status}} {{.QueryText}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/query-history/query-history.go b/cmd/workspace/query-history/query-history.go index 5b1e86d0d..1593d6766 100755 --- a/cmd/workspace/query-history/query-history.go +++ b/cmd/workspace/query-history/query-history.go @@ -10,50 +10,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "query-history", - Short: `Access the history of queries through SQL warehouses.`, - Long: `Access the history of queries through SQL warehouses.`, - Annotations: map[string]string{ - "package": "sql", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "query-history", + Short: `Access the history of queries through SQL warehouses.`, + Long: `Access the history of queries through SQL warehouses.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start list command -var listReq sql.ListQueryHistoryRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sql.ListQueryHistoryRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListQueryHistoryRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: filter_by - listCmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include metrics about query.`) - listCmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that can be used to get the next page of results.`) + cmd.Flags().BoolVar(&listReq.IncludeMetrics, "include-metrics", listReq.IncludeMetrics, `Whether to include metrics about query.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Limit the number of results returned in one page.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `A token that 
can be used to get the next page of results.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List Queries.`, - Long: `List Queries. + cmd.Use = "list" + cmd.Short = `List Queries.` + cmd.Long = `List Queries. List the history of queries through SQL warehouses. - You can filter by user ID, warehouse ID, status, and time range.`, + You can filter by user ID, warehouse ID, status, and time range.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -70,10 +92,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service QueryHistory diff --git a/cmd/workspace/recipient-activation/recipient-activation.go b/cmd/workspace/recipient-activation/recipient-activation.go index 33bc54ef2..fa0e6a83f 100755 --- a/cmd/workspace/recipient-activation/recipient-activation.go +++ b/cmd/workspace/recipient-activation/recipient-activation.go @@ -9,38 +9,60 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "recipient-activation", - Short: `Databricks Recipient Activation REST API.`, - Long: `Databricks Recipient Activation REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "recipient-activation", + Short: `Databricks Recipient Activation REST API.`, + Long: `Databricks Recipient Activation REST API`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-activation-url-info command -var getActivationUrlInfoReq sharing.GetActivationUrlInfoRequest -func init() { - Cmd.AddCommand(getActivationUrlInfoCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getActivationUrlInfoOverrides []func( + *cobra.Command, + *sharing.GetActivationUrlInfoRequest, +) + +func newGetActivationUrlInfo() *cobra.Command { + cmd := &cobra.Command{} + + var getActivationUrlInfoReq sharing.GetActivationUrlInfoRequest + // TODO: short flags -} - -var getActivationUrlInfoCmd = &cobra.Command{ - Use: "get-activation-url-info ACTIVATION_URL", - Short: `Get a share activation URL.`, - Long: `Get a share activation URL. + cmd.Use = "get-activation-url-info ACTIVATION_URL" + cmd.Short = `Get a share activation URL.` + cmd.Long = `Get a share activation URL. - Gets an activation URL for a share.`, + Gets an activation URL for a share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -51,36 +73,58 @@ var getActivationUrlInfoCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getActivationUrlInfoOverrides { + fn(cmd, &getActivationUrlInfoReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetActivationUrlInfo()) + }) } // start retrieve-token command -var retrieveTokenReq sharing.RetrieveTokenRequest -func init() { - Cmd.AddCommand(retrieveTokenCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var retrieveTokenOverrides []func( + *cobra.Command, + *sharing.RetrieveTokenRequest, +) + +func newRetrieveToken() *cobra.Command { + cmd := &cobra.Command{} + + var retrieveTokenReq sharing.RetrieveTokenRequest + // TODO: short flags -} - -var retrieveTokenCmd = &cobra.Command{ - Use: "retrieve-token ACTIVATION_URL", - Short: `Get an access token.`, - Long: `Get an access token. + cmd.Use = "retrieve-token ACTIVATION_URL" + cmd.Short = `Get an access token.` + cmd.Long = `Get an access token. Retrieve access token with an activation url. This is a public API without any - authentication.`, + authentication.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -91,10 +135,24 @@ var retrieveTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range retrieveTokenOverrides { + fn(cmd, &retrieveTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRetrieveToken()) + }) } // end service RecipientActivation diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index bb8f9b17f..10430cdf2 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -12,52 +12,74 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "recipients", - Short: `Databricks Recipients REST API.`, - Long: `Databricks Recipients REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "recipients", + Short: `Databricks Recipients REST API.`, + Long: `Databricks Recipients REST API`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateRecipient -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sharing.CreateRecipient, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateRecipient + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the recipient.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Description about the recipient.`) // TODO: any: data_recipient_global_metastore_id // TODO: complex arg: ip_access_list - createCmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of the recipient owner.`) + cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of the recipient owner.`) // TODO: complex arg: properties_kvpairs - createCmd.Flags().StringVar(&createReq.SharingCode, "sharing-code", createReq.SharingCode, `The one-time sharing code provided by the data recipient.`) + cmd.Flags().StringVar(&createReq.SharingCode, "sharing-code", createReq.SharingCode, `The one-time sharing code provided by the data recipient.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME AUTHENTICATION_TYPE", - Short: `Create a share recipient.`, - Long: `Create a share recipient. + cmd.Use = "create NAME AUTHENTICATION_TYPE" + cmd.Short = `Create a share recipient.` + cmd.Long = `Create a share recipient. Creates a new recipient with the delta sharing authentication type in the metastore. 
The caller must be a metastore admin or has the - **CREATE_RECIPIENT** privilege on the metastore.`, + **CREATE_RECIPIENT** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -79,52 +101,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteRecipientRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteRecipientRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteRecipientRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a share recipient.`, - Long: `Delete a share recipient. 
+ cmd.Use = "delete NAME" + cmd.Short = `Delete a share recipient.` + cmd.Long = `Delete a share recipient. Deletes the specified recipient from the metastore. The caller must be the - owner of the recipient.`, + owner of the recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the recipient") - } deleteReq.Name = args[0] err = w.Recipients.Delete(ctx, deleteReq) @@ -132,53 +163,62 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetRecipientRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sharing.GetRecipientRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetRecipientRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a share recipient.`, - Long: `Get a share recipient. + cmd.Use = "get NAME" + cmd.Short = `Get a share recipient.` + cmd.Long = `Get a share recipient. Gets a share recipient from the metastore if: - * the caller is the owner of the share recipient, or: * is a metastore admin`, + * the caller is the owner of the share recipient, or: * is a metastore admin` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the recipient") - } getReq.Name = args[0] response, err := w.Recipients.Get(ctx, getReq) @@ -186,45 +226,67 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq sharing.ListRecipientsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *sharing.ListRecipientsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sharing.ListRecipientsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", listReq.DataRecipientGlobalMetastoreId, `If not provided, all recipients will be returned.`) + cmd.Flags().StringVar(&listReq.DataRecipientGlobalMetastoreId, "data-recipient-global-metastore-id", listReq.DataRecipientGlobalMetastoreId, `If not provided, all recipients will be returned.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List share recipients.`, - Long: `List share recipients. + cmd.Use = "list" + cmd.Short = `List share recipients.` + cmd.Long = `List share recipients. Gets an array of all share recipients within the current metastore where: * the caller is a metastore admin, or * the caller is the owner. 
There is no - guarantee of a specific ordering of the elements in the array.`, + guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -241,36 +303,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start rotate-token command -var rotateTokenReq sharing.RotateRecipientToken -func init() { - Cmd.AddCommand(rotateTokenCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var rotateTokenOverrides []func( + *cobra.Command, + *sharing.RotateRecipientToken, +) + +func newRotateToken() *cobra.Command { + cmd := &cobra.Command{} + + var rotateTokenReq sharing.RotateRecipientToken + // TODO: short flags -} - -var rotateTokenCmd = &cobra.Command{ - Use: "rotate-token EXISTING_TOKEN_EXPIRE_IN_SECONDS NAME", - Short: `Rotate a token.`, - Long: `Rotate a token. 
+ cmd.Use = "rotate-token EXISTING_TOKEN_EXPIRE_IN_SECONDS NAME" + cmd.Short = `Rotate a token.` + cmd.Long = `Rotate a token. Refreshes the specified recipient's delta sharing authentication token with - the provided token info. The caller must be the owner of the recipient.`, + the provided token info. The caller must be the owner of the recipient.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -285,52 +369,61 @@ var rotateTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range rotateTokenOverrides { + fn(cmd, &rotateTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRotateToken()) + }) } // start share-permissions command -var sharePermissionsReq sharing.SharePermissionsRequest -func init() { - Cmd.AddCommand(sharePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var sharePermissionsOverrides []func( + *cobra.Command, + *sharing.SharePermissionsRequest, +) + +func newSharePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var sharePermissionsReq sharing.SharePermissionsRequest + // TODO: short flags -} - -var sharePermissionsCmd = &cobra.Command{ - Use: "share-permissions NAME", - Short: `Get recipient share permissions.`, - Long: `Get recipient share permissions. + cmd.Use = "share-permissions NAME" + cmd.Short = `Get recipient share permissions.` + cmd.Long = `Get recipient share permissions. Gets the share permissions for the specified Recipient. The caller must be a - metastore admin or the owner of the Recipient.`, + metastore admin or the owner of the Recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The name of the Recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the name of the recipient") - } sharePermissionsReq.Name = args[0] response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq) @@ -338,41 +431,70 @@ var sharePermissionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range sharePermissionsOverrides { + fn(cmd, &sharePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSharePermissions()) + }) } // start update command -var updateReq sharing.UpdateRecipient -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sharing.UpdateRecipient, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateRecipient + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the recipient.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Description about the recipient.`) // TODO: complex arg: ip_access_list - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of Recipient.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of the recipient owner.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of Recipient.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of the recipient owner.`) // TODO: complex arg: properties_kvpairs -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a share recipient.`, - Long: `Update a share recipient. + cmd.Use = "update NAME" + cmd.Short = `Update a share recipient.` + cmd.Long = `Update a share recipient. Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of the recipient. 
If the recipient name will be updated, - the user must be both a metastore admin and the owner of the recipient.`, + the user must be both a metastore admin and the owner of the recipient.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -382,23 +504,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." - names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of Recipient") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of recipient") - } updateReq.Name = args[0] } @@ -407,10 +512,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Recipients diff --git a/cmd/workspace/repos/overrides.go b/cmd/workspace/repos/overrides.go index 127a794a5..f6f26f81d 100644 --- a/cmd/workspace/repos/overrides.go +++ b/cmd/workspace/repos/overrides.go @@ -7,16 +7,19 @@ import ( "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/spf13/cobra" ) -func init() { +func listOverride(listCmd *cobra.Command, listReq *workspace.ListReposRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{green "%d" .Id}} {{.Path}} {{.Branch|blue}} {{.Url|cyan}} {{end}}`) +} +func createOverride(createCmd *cobra.Command, createReq *workspace.CreateRepo) { createCmd.Use = "create URL [PROVIDER]" createCmd.Args = func(cmd *cobra.Command, args []string) error { // If the provider argument is not specified, we try to detect it from the URL. 
@@ -26,11 +29,13 @@ func init() { } return check(cmd, args) } + + createJson := createCmd.Flag("json").Value.(*flags.JsonFlag) createCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) if cmd.Flags().Changed("json") { - err = createJson.Unmarshal(&createReq) + err = createJson.Unmarshal(createReq) if err != nil { return err } @@ -46,13 +51,15 @@ func init() { } } } - response, err := w.Repos.Create(ctx, createReq) + response, err := w.Repos.Create(ctx, *createReq) if err != nil { return err } return cmdio.Render(ctx, response) } +} +func deleteOverride(deleteCmd *cobra.Command, deleteReq *workspace.DeleteRepoRequest) { deleteCmd.Use = "delete REPO_ID_OR_PATH" deleteCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -62,13 +69,15 @@ func init() { if err != nil { return err } - err = w.Repos.Delete(ctx, deleteReq) + err = w.Repos.Delete(ctx, *deleteReq) if err != nil { return err } return nil } +} +func getOverride(getCmd *cobra.Command, getReq *workspace.GetRepoRequest) { getCmd.Use = "get REPO_ID_OR_PATH" getCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -78,14 +87,18 @@ func init() { return err } - response, err := w.Repos.Get(ctx, getReq) + response, err := w.Repos.Get(ctx, *getReq) if err != nil { return err } return cmdio.Render(ctx, response) } +} +func updateOverride(updateCmd *cobra.Command, updateReq *workspace.UpdateRepo) { updateCmd.Use = "update REPO_ID_OR_PATH" + + updateJson := updateCmd.Flag("json").Value.(*flags.JsonFlag) updateCmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -101,7 +114,7 @@ func init() { } } - err = w.Repos.Update(ctx, updateReq) + err = w.Repos.Update(ctx, *updateReq) if err != nil { return err } @@ -147,3 +160,11 @@ func repoArgumentToRepoID(ctx context.Context, w *databricks.WorkspaceClient, ar } return oi.ObjectId, 
nil } + +func init() { + listOverrides = append(listOverrides, listOverride) + createOverrides = append(createOverrides, createOverride) + deleteOverrides = append(deleteOverrides, deleteOverride) + getOverrides = append(getOverrides, getOverride) + updateOverrides = append(updateOverrides, updateOverride) +} diff --git a/cmd/workspace/repos/repos.go b/cmd/workspace/repos/repos.go index fdd9556d4..087a62449 100755 --- a/cmd/workspace/repos/repos.go +++ b/cmd/workspace/repos/repos.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "repos", - Short: `The Repos API allows users to manage their git repos.`, - Long: `The Repos API allows users to manage their git repos. Users can use the API to +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "repos", + Short: `The Repos API allows users to manage their git repos.`, + Long: `The Repos API allows users to manage their git repos. Users can use the API to access all repos that they have manage permissions on. Databricks Repos is a visual Git client in Databricks. It supports common Git @@ -25,44 +30,61 @@ var Cmd = &cobra.Command{ Within Repos you can develop code in notebooks or other files and follow data science and engineering code development best practices using Git for version control, collaboration, and CI/CD.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq workspace.CreateRepo -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *workspace.CreateRepo, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq workspace.CreateRepo + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Path, "path", createReq.Path, `Desired path for the repo in the workspace.`) + cmd.Flags().StringVar(&createReq.Path, "path", createReq.Path, `Desired path for the repo in the workspace.`) // TODO: complex arg: sparse_checkout -} - -var createCmd = &cobra.Command{ - Use: "create URL PROVIDER", - Short: `Create a repo.`, - Long: `Create a repo. + cmd.Use = "create URL PROVIDER" + cmd.Short = `Create a repo.` + cmd.Long = `Create a repo. Creates a repo in the workspace and links it to the remote Git repo specified. 
Note that repos created programmatically must be linked to a remote Git repo, - unlike repos created in the browser.`, + unlike repos created in the browser.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -81,51 +103,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq workspace.DeleteRepoRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.DeleteRepoRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.DeleteRepoRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete REPO_ID", - Short: `Delete a repo.`, - Long: `Delete a repo. 
+ cmd.Use = "delete REPO_ID" + cmd.Short = `Delete a repo.` + cmd.Long = `Delete a repo. - Deletes the specified repo.`, + Deletes the specified repo.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." - names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") - } _, err = fmt.Sscan(args[0], &deleteReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) @@ -136,51 +167,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq workspace.GetRepoRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *workspace.GetRepoRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq workspace.GetRepoRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get REPO_ID", - Short: `Get a repo.`, - Long: `Get a repo. + cmd.Use = "get REPO_ID" + cmd.Short = `Get a repo.` + cmd.Long = `Get a repo. - Returns the repo with the given repo ID.`, + Returns the repo with the given repo ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." - names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") - } _, err = fmt.Sscan(args[0], &getReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) @@ -191,44 +231,66 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq workspace.ListReposRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *workspace.ListReposRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq workspace.ListReposRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.NextPageToken, "next-page-token", listReq.NextPageToken, `Token used to get the next page of results.`) - listCmd.Flags().StringVar(&listReq.PathPrefix, "path-prefix", listReq.PathPrefix, `Filters repos that have paths starting with the given path prefix.`) + cmd.Flags().StringVar(&listReq.NextPageToken, "next-page-token", listReq.NextPageToken, `Token used to get the next page of results.`) + cmd.Flags().StringVar(&listReq.PathPrefix, "path-prefix", listReq.PathPrefix, `Filters repos that have paths starting with the given path prefix.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `Get repos.`, - Long: `Get repos. + cmd.Use = "list" + cmd.Short = `Get repos.` + cmd.Long = `Get repos. Returns repos that the calling user has Manage permissions on. 
Results are - paginated with each page containing twenty repos.`, + paginated with each page containing twenty repos.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -245,38 +307,64 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq workspace.UpdateRepo -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *workspace.UpdateRepo, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq workspace.UpdateRepo + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Branch, "branch", updateReq.Branch, `Branch that the local version of the repo is checked out to.`) + cmd.Flags().StringVar(&updateReq.Branch, "branch", updateReq.Branch, `Branch that the local version of the repo is checked out to.`) // TODO: complex arg: sparse_checkout - updateCmd.Flags().StringVar(&updateReq.Tag, "tag", updateReq.Tag, `Tag that the local version of the repo is checked out to.`) + cmd.Flags().StringVar(&updateReq.Tag, "tag", updateReq.Tag, `Tag that the local version of the repo is checked out to.`) -} - -var updateCmd = &cobra.Command{ - Use: "update REPO_ID", - Short: `Update a repo.`, - Long: `Update a repo. + cmd.Use = "update REPO_ID" + cmd.Short = `Update a repo.` + cmd.Long = `Update a repo. 
Updates the repo to a different branch or tag, or updates the repo to the - latest commit on the same branch.`, + latest commit on the same branch.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -286,23 +374,6 @@ var updateCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." - names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id for the corresponding repo to access") - } _, err = fmt.Sscan(args[0], &updateReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) @@ -313,10 +384,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Repos diff --git a/cmd/workspace/schemas/overrides.go b/cmd/workspace/schemas/overrides.go index 4ff8bf124..180690b6e 100644 --- a/cmd/workspace/schemas/overrides.go +++ b/cmd/workspace/schemas/overrides.go @@ -1,10 +1,18 @@ package schemas -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *catalog.ListSchemasRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Full Name"}} {{header "Owner"}} {{header "Comment"}} {{range .}}{{.FullName|green}} {{.Owner|cyan}} {{.Comment}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/schemas/schemas.go b/cmd/workspace/schemas/schemas.go index 4a6eb33b9..e1ad7be4c 100755 --- a/cmd/workspace/schemas/schemas.go +++ b/cmd/workspace/schemas/schemas.go @@ -3,8 +3,6 @@ package schemas import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,53 +10,75 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "schemas", - Short: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.`, - Long: `A schema (also called a database) is the second layer of Unity Catalog’s +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "schemas", + Short: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.`, + Long: `A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. To access (or list) a table or view in a schema, users must have the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT permission on the table or view.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateSchema -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateSchema, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateSchema + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) // TODO: map via StringToStringVar: properties - createCmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within schema.`) + cmd.Flags().StringVar(&createReq.StorageRoot, "storage-root", createReq.StorageRoot, `Storage root URL for managed tables within schema.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME CATALOG_NAME", - Short: `Create a schema.`, - Long: `Create a schema. + cmd.Use = "create NAME CATALOG_NAME" + cmd.Short = `Create a schema.` + cmd.Long = `Create a schema. Creates a new schema for catalog in the Metatastore. 
The caller must be a metastore admin, or have the **CREATE_SCHEMA** privilege in the parent - catalog.`, + catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -77,52 +97,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteSchemaRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteSchemaRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteSchemaRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME", - Short: `Delete a schema.`, - Long: `Delete a schema. + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a schema.` + cmd.Long = `Delete a schema. 
Deletes the specified schema from the parent catalog. The caller must be the - owner of the schema or an owner of the parent catalog.`, + owner of the schema or an owner of the parent catalog.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." - names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the schema") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the schema") - } deleteReq.FullName = args[0] err = w.Schemas.Delete(ctx, deleteReq) @@ -130,53 +159,62 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetSchemaRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetSchemaRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetSchemaRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get FULL_NAME", - Short: `Get a schema.`, - Long: `Get a schema. + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a schema.` + cmd.Long = `Get a schema. Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the schema, or a user that has the **USE_SCHEMA** - privilege on the schema.`, + privilege on the schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." - names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the schema") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the schema") - } getReq.FullName = args[0] response, err := w.Schemas.Get(ctx, getReq) @@ -184,39 +222,61 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListSchemasRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListSchemasRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListSchemasRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME", - Short: `List schemas.`, - Long: `List schemas. + cmd.Use = "list CATALOG_NAME" + cmd.Short = `List schemas.` + cmd.Long = `List schemas. Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved. 
There is - no guarantee of a specific ordering of the elements in the array.`, + no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -227,42 +287,68 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateSchema -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *catalog.UpdateSchema, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateSchema + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of schema, relative to parent catalog.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of schema.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of schema, relative to parent catalog.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of schema.`) // TODO: map via StringToStringVar: properties -} - -var updateCmd = &cobra.Command{ - Use: "update FULL_NAME", - Short: `Update a schema.`, - Long: `Update a schema. + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a schema.` + cmd.Long = `Update a schema. Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If the caller is a metastore admin, only the __owner__ field can be changed in the update. 
If the __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA** privilege on - the parent catalog.`, + the parent catalog.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -272,23 +358,6 @@ var updateCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." - names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the schema") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the schema") - } updateReq.FullName = args[0] response, err := w.Schemas.Update(ctx, updateReq) @@ -296,10 +365,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Schemas diff --git a/cmd/workspace/secrets/overrides.go b/cmd/workspace/secrets/overrides.go index 5443aca28..40c7babab 100644 --- a/cmd/workspace/secrets/overrides.go +++ b/cmd/workspace/secrets/overrides.go @@ -1,121 +1,22 @@ package secrets import ( - "encoding/base64" - "fmt" - "io" - "os" - - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" - "github.com/databricks/cli/libs/flags" - "github.com/databricks/databricks-sdk-go/service/workspace" "github.com/spf13/cobra" ) -func init() { +func cmdOverride(cmd *cobra.Command) { + cmd.AddCommand(newPutSecret()) +} + +func listScopesOverride(listScopesCmd *cobra.Command) { listScopesCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Scope"}} {{header "Backend Type"}} {{range .}}{{.Name|green}} {{.BackendType}} {{end}}`) - - Cmd.AddCommand(putSecretCmd) - // TODO: short flags - putSecretCmd.Flags().Var(&putSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) - - putSecretCmd.Flags().StringVar(&putSecretReq.BytesValue, "bytes-value", putSecretReq.BytesValue, `If specified, value will be stored as bytes.`) - putSecretCmd.Flags().StringVar(&putSecretReq.StringValue, "string-value", putSecretReq.StringValue, `If specified, note that the value will be stored in UTF-8 (MB4) form.`) } -var putSecretReq workspace.PutSecret -var putSecretJson flags.JsonFlag - -var putSecretCmd = &cobra.Command{ - Use: "put-secret SCOPE KEY", - Short: `Add a secret.`, - Long: `Add a secret. - - Inserts a secret under the provided scope with the given name. If a secret - already exists with the same name, this command overwrites the existing - secret's value. The server encrypts the secret using the secret scope's - encryption settings before storing it. 
- - You must have WRITE or MANAGE permission on the secret scope. The secret - key must consist of alphanumeric characters, dashes, underscores, and periods, - and cannot exceed 128 characters. The maximum allowed secret value size is 128 - KB. The maximum number of secrets in a given scope is 1000. - - The arguments "string-value" or "bytes-value" specify the type of the secret, - which will determine the value returned when the secret value is requested. - - You can specify the secret value in one of three ways: - * Specify the value as a string using the --string-value flag. - * Input the secret when prompted interactively (single-line secrets). - * Pass the secret via standard input (multi-line secrets). - `, - - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(2) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { - ctx := cmd.Context() - w := root.WorkspaceClient(ctx) - - bytesValueChanged := cmd.Flags().Changed("bytes-value") - stringValueChanged := cmd.Flags().Changed("string-value") - if bytesValueChanged && stringValueChanged { - return fmt.Errorf("cannot specify both --bytes-value and --string-value") - } - - if cmd.Flags().Changed("json") { - err = putSecretJson.Unmarshal(&putSecretReq) - if err != nil { - return err - } - } else { - putSecretReq.Scope = args[0] - putSecretReq.Key = args[1] - - switch { - case bytesValueChanged: - // Bytes value set; encode as base64. - putSecretReq.BytesValue = base64.StdEncoding.EncodeToString([]byte(putSecretReq.BytesValue)) - case stringValueChanged: - // String value set; nothing to do. - default: - // Neither is specified; read secret value from stdin. 
- bytes, err := promptSecret(cmd) - if err != nil { - return err - } - putSecretReq.BytesValue = base64.StdEncoding.EncodeToString(bytes) - } - } - - err = w.Secrets.PutSecret(ctx, putSecretReq) - if err != nil { - return err - } - return nil - }, -} - -func promptSecret(cmd *cobra.Command) ([]byte, error) { - // If stdin is a TTY, prompt for the secret. - if !cmdio.IsInTTY(cmd.Context()) { - return io.ReadAll(os.Stdin) - } - - value, err := cmdio.Secret(cmd.Context(), "Please enter your secret value") - if err != nil { - return nil, err - } - - return []byte(value), nil +func init() { + cmdOverrides = append(cmdOverrides, cmdOverride) + listScopesOverrides = append(listScopesOverrides, listScopesOverride) } diff --git a/cmd/workspace/secrets/put_secret.go b/cmd/workspace/secrets/put_secret.go new file mode 100644 index 000000000..2fbf49c5c --- /dev/null +++ b/cmd/workspace/secrets/put_secret.go @@ -0,0 +1,122 @@ +package secrets + +import ( + "encoding/base64" + "fmt" + "io" + "os" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/spf13/cobra" +) + +func newPutSecret() *cobra.Command { + cmd := &cobra.Command{} + + var putSecretReq workspace.PutSecret + var putSecretJson flags.JsonFlag + + cmd.Flags().Var(&putSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&putSecretReq.BytesValue, "bytes-value", putSecretReq.BytesValue, `If specified, value will be stored as bytes.`) + cmd.Flags().StringVar(&putSecretReq.StringValue, "string-value", putSecretReq.StringValue, `If specified, note that the value will be stored in UTF-8 (MB4) form.`) + + cmd.Use = "put-secret SCOPE KEY" + cmd.Short = `Add a secret.` + cmd.Long = `Add a secret. + + Inserts a secret under the provided scope with the given name. 
If a secret + already exists with the same name, this command overwrites the existing + secret's value. The server encrypts the secret using the secret scope's + encryption settings before storing it. + + You must have WRITE or MANAGE permission on the secret scope. The secret + key must consist of alphanumeric characters, dashes, underscores, and periods, + and cannot exceed 128 characters. The maximum allowed secret value size is 128 + KB. The maximum number of secrets in a given scope is 1000. + + The arguments "string-value" or "bytes-value" specify the type of the secret, + which will determine the value returned when the secret value is requested. + + You can specify the secret value in one of three ways: + * Specify the value as a string using the --string-value flag. + * Input the secret when prompted interactively (single-line secrets). + * Pass the secret via standard input (multi-line secrets). + ` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + bytesValueChanged := cmd.Flags().Changed("bytes-value") + stringValueChanged := cmd.Flags().Changed("string-value") + if bytesValueChanged && stringValueChanged { + return fmt.Errorf("cannot specify both --bytes-value and --string-value") + } + + if cmd.Flags().Changed("json") { + err = putSecretJson.Unmarshal(&putSecretReq) + if err != nil { + return err + } + } else { + putSecretReq.Scope = args[0] + putSecretReq.Key = args[1] + + switch { + case bytesValueChanged: + // Bytes value set; encode as base64. 
+ putSecretReq.BytesValue = base64.StdEncoding.EncodeToString([]byte(putSecretReq.BytesValue)) + case stringValueChanged: + // String value set; nothing to do. + default: + // Neither is specified; read secret value from stdin. + bytes, err := promptSecret(cmd) + if err != nil { + return err + } + putSecretReq.BytesValue = base64.StdEncoding.EncodeToString(bytes) + } + } + + err = w.Secrets.PutSecret(ctx, putSecretReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Potential future follow up to auto complete secret scopes for the first argument. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + return cmd +} + +func promptSecret(cmd *cobra.Command) ([]byte, error) { + // If stdin is a TTY, prompt for the secret. + if !cmdio.IsInTTY(cmd.Context()) { + return io.ReadAll(os.Stdin) + } + + value, err := cmdio.Secret(cmd.Context(), "Please enter your secret value") + if err != nil { + return nil, err + } + + return []byte(value), nil +} diff --git a/cmd/workspace/secrets/secrets.go b/cmd/workspace/secrets/secrets.go index fada4d1fe..a8b907ac4 100755 --- a/cmd/workspace/secrets/secrets.go +++ b/cmd/workspace/secrets/secrets.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "secrets", - Short: `The Secrets API allows you to manage secrets, secret scopes, and access permissions.`, - Long: `The Secrets API allows you to manage secrets, secret scopes, and access +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "secrets", + Short: `The Secrets API allows you to manage secrets, secret scopes, and access permissions.`, + Long: `The Secrets API allows you to manage secrets, secret scopes, and access permissions. 
Sometimes accessing data requires that you authenticate to external data @@ -27,45 +32,62 @@ var Cmd = &cobra.Command{ Databricks secrets. While Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not possible to prevent such users from reading secrets.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-scope command -var createScopeReq workspace.CreateScope -var createScopeJson flags.JsonFlag -func init() { - Cmd.AddCommand(createScopeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createScopeOverrides []func( + *cobra.Command, + *workspace.CreateScope, +) + +func newCreateScope() *cobra.Command { + cmd := &cobra.Command{} + + var createScopeReq workspace.CreateScope + var createScopeJson flags.JsonFlag + // TODO: short flags - createScopeCmd.Flags().Var(&createScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: backend_azure_keyvault - createScopeCmd.Flags().StringVar(&createScopeReq.InitialManagePrincipal, "initial-manage-principal", createScopeReq.InitialManagePrincipal, `The principal that is initially granted MANAGE permission to the created scope.`) - createScopeCmd.Flags().Var(&createScopeReq.ScopeBackendType, "scope-backend-type", `The backend type the scope will be created with.`) + cmd.Flags().StringVar(&createScopeReq.InitialManagePrincipal, "initial-manage-principal", createScopeReq.InitialManagePrincipal, `The principal that is initially granted MANAGE permission to the created 
scope.`) + cmd.Flags().Var(&createScopeReq.ScopeBackendType, "scope-backend-type", `The backend type the scope will be created with.`) -} - -var createScopeCmd = &cobra.Command{ - Use: "create-scope SCOPE", - Short: `Create a new secret scope.`, - Long: `Create a new secret scope. + cmd.Use = "create-scope SCOPE" + cmd.Short = `Create a new secret scope.` + cmd.Long = `Create a new secret scope. The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not exceed 128 characters. The maximum number of scopes - in a workspace is 100.`, + in a workspace is 100.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -83,45 +105,67 @@ var createScopeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createScopeOverrides { + fn(cmd, &createScopeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateScope()) + }) } // start delete-acl command -var deleteAclReq workspace.DeleteAcl -var deleteAclJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteAclCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteAclOverrides []func( + *cobra.Command, + *workspace.DeleteAcl, +) + +func newDeleteAcl() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAclReq workspace.DeleteAcl + var deleteAclJson flags.JsonFlag + // TODO: short flags - deleteAclCmd.Flags().Var(&deleteAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteAclCmd = &cobra.Command{ - Use: "delete-acl SCOPE PRINCIPAL", - Short: `Delete an ACL.`, - Long: `Delete an ACL. + cmd.Use = "delete-acl SCOPE PRINCIPAL" + cmd.Short = `Delete an ACL.` + cmd.Long = `Delete an ACL. Deletes the given ACL on the given scope. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope, principal, or ACL exists. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -140,44 +184,66 @@ var deleteAclCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteAclOverrides { + fn(cmd, &deleteAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAcl()) + }) } // start delete-scope command -var deleteScopeReq workspace.DeleteScope -var deleteScopeJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteScopeCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteScopeOverrides []func( + *cobra.Command, + *workspace.DeleteScope, +) + +func newDeleteScope() *cobra.Command { + cmd := &cobra.Command{} + + var deleteScopeReq workspace.DeleteScope + var deleteScopeJson flags.JsonFlag + // TODO: short flags - deleteScopeCmd.Flags().Var(&deleteScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteScopeJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteScopeCmd = &cobra.Command{ - Use: "delete-scope SCOPE", - Short: `Delete a secret scope.`, - Long: `Delete a secret scope. + cmd.Use = "delete-scope SCOPE" + cmd.Short = `Delete a secret scope.` + cmd.Long = `Delete a secret scope. Deletes a secret scope. Throws RESOURCE_DOES_NOT_EXIST if the scope does not exist. 
Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -195,45 +261,67 @@ var deleteScopeCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteScopeOverrides { + fn(cmd, &deleteScopeReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteScope()) + }) } // start delete-secret command -var deleteSecretReq workspace.DeleteSecret -var deleteSecretJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteSecretCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteSecretOverrides []func( + *cobra.Command, + *workspace.DeleteSecret, +) + +func newDeleteSecret() *cobra.Command { + cmd := &cobra.Command{} + + var deleteSecretReq workspace.DeleteSecret + var deleteSecretJson flags.JsonFlag + // TODO: short flags - deleteSecretCmd.Flags().Var(&deleteSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteSecretJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteSecretCmd = &cobra.Command{ - Use: "delete-secret SCOPE KEY", - Short: `Delete a secret.`, - Long: `Delete a secret. + cmd.Use = "delete-secret SCOPE KEY" + cmd.Short = `Delete a secret.` + cmd.Long = `Delete a secret. Deletes the secret stored in this secret scope. You must have WRITE or MANAGE permission on the secret scope. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope or secret exists. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -252,40 +340,62 @@ var deleteSecretCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteSecretOverrides { + fn(cmd, &deleteSecretReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteSecret()) + }) } // start get-acl command -var getAclReq workspace.GetAclRequest -func init() { - Cmd.AddCommand(getAclCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getAclOverrides []func( + *cobra.Command, + *workspace.GetAclRequest, +) + +func newGetAcl() *cobra.Command { + cmd := &cobra.Command{} + + var getAclReq workspace.GetAclRequest + // TODO: short flags -} - -var getAclCmd = &cobra.Command{ - Use: "get-acl SCOPE PRINCIPAL", - Short: `Get secret ACL details.`, - Long: `Get secret ACL details. + cmd.Use = "get-acl SCOPE PRINCIPAL" + cmd.Short = `Get secret ACL details.` + cmd.Long = `Get secret ACL details. Gets the details about the given ACL, such as the group and permission. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -297,40 +407,62 @@ var getAclCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getAclOverrides { + fn(cmd, &getAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetAcl()) + }) } // start list-acls command -var listAclsReq workspace.ListAclsRequest -func init() { - Cmd.AddCommand(listAclsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listAclsOverrides []func( + *cobra.Command, + *workspace.ListAclsRequest, +) + +func newListAcls() *cobra.Command { + cmd := &cobra.Command{} + + var listAclsReq workspace.ListAclsRequest + // TODO: short flags -} - -var listAclsCmd = &cobra.Command{ - Use: "list-acls SCOPE", - Short: `Lists ACLs.`, - Long: `Lists ACLs. + cmd.Use = "list-acls SCOPE" + cmd.Short = `Lists ACLs.` + cmd.Long = `Lists ACLs. List the ACLs for a given secret scope. Users must have the MANAGE permission to invoke this API. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -341,32 +473,50 @@ var listAclsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listAclsOverrides { + fn(cmd, &listAclsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListAcls()) + }) } // start list-scopes command -func init() { - Cmd.AddCommand(listScopesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listScopesOverrides []func( + *cobra.Command, +) -} +func newListScopes() *cobra.Command { + cmd := &cobra.Command{} -var listScopesCmd = &cobra.Command{ - Use: "list-scopes", - Short: `List all scopes.`, - Long: `List all scopes. + cmd.Use = "list-scopes" + cmd.Short = `List all scopes.` + cmd.Long = `List all scopes. Lists all secret scopes available in the workspace. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Secrets.ListScopesAll(ctx) @@ -374,25 +524,45 @@ var listScopesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listScopesOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListScopes()) + }) } // start list-secrets command -var listSecretsReq workspace.ListSecretsRequest -func init() { - Cmd.AddCommand(listSecretsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listSecretsOverrides []func( + *cobra.Command, + *workspace.ListSecretsRequest, +) + +func newListSecrets() *cobra.Command { + cmd := &cobra.Command{} + + var listSecretsReq workspace.ListSecretsRequest + // TODO: short flags -} - -var listSecretsCmd = &cobra.Command{ - Use: "list-secrets SCOPE", - Short: `List secret keys.`, - Long: `List secret keys. + cmd.Use = "list-secrets SCOPE" + cmd.Short = `List secret keys.` + cmd.Long = `List secret keys. Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data cannot be retrieved using this API. Users need the READ @@ -401,15 +571,17 @@ var listSecretsCmd = &cobra.Command{ The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws RESOURCE_DOES_NOT_EXIST if no such secret scope exists. 
Throws PERMISSION_DENIED if the user does not have permission to make this API - call.`, + call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -420,27 +592,47 @@ var listSecretsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSecretsOverrides { + fn(cmd, &listSecretsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListSecrets()) + }) } // start put-acl command -var putAclReq workspace.PutAcl -var putAclJson flags.JsonFlag -func init() { - Cmd.AddCommand(putAclCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var putAclOverrides []func( + *cobra.Command, + *workspace.PutAcl, +) + +func newPutAcl() *cobra.Command { + cmd := &cobra.Command{} + + var putAclReq workspace.PutAcl + var putAclJson flags.JsonFlag + // TODO: short flags - putAclCmd.Flags().Var(&putAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&putAclJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var putAclCmd = &cobra.Command{ - Use: "put-acl SCOPE PRINCIPAL PERMISSION", - Short: `Create/update an ACL.`, - Long: `Create/update an ACL. + cmd.Use = "put-acl SCOPE PRINCIPAL PERMISSION" + cmd.Short = `Create/update an ACL.` + cmd.Long = `Create/update an ACL. Creates or overwrites the Access Control List (ACL) associated with the given principal (user or group) on the specified scope point. @@ -467,18 +659,20 @@ var putAclCmd = &cobra.Command{ RESOURCE_ALREADY_EXISTS if a permission for the principal already exists. Throws INVALID_PARAMETER_VALUE if the permission or principal is invalid. Throws PERMISSION_DENIED if the user does not have permission to make this - API call.`, + API call.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -501,10 +695,24 @@ var putAclCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range putAclOverrides { + fn(cmd, &putAclReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPutAcl()) + }) } // end service Secrets diff --git a/cmd/workspace/service-principals/overrides.go b/cmd/workspace/service-principals/overrides.go index c335bead6..185549b7c 100644 --- a/cmd/workspace/service-principals/overrides.go +++ b/cmd/workspace/service-principals/overrides.go @@ -1,9 +1,17 @@ package service_principals -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListServicePrincipalsRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.ApplicationId}} {{.DisplayName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/service-principals/service-principals.go b/cmd/workspace/service-principals/service-principals.go index 4bb75d2b4..787ca29ef 100755 --- a/cmd/workspace/service-principals/service-principals.go +++ b/cmd/workspace/service-principals/service-principals.go @@ -3,8 +3,6 @@ package service_principals import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,57 +10,79 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "service-principals", - Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, - Long: `Identities for use with jobs, automated tools, and systems such as 
scripts, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "service-principals", + Short: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.`, + Long: `Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms. Databricks recommends creating service principals to run production jobs or modify production data. If all processes that act on production data run with service principals, interactive users do not need any write, delete, or modify privileges in production. This eliminates the risk of a user overwriting production data by accident.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.ServicePrincipal -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.ServicePrincipal + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.ApplicationId, "application-id", createReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a service principal.`, - Long: `Create a service principal. + cmd.Use = "create" + cmd.Short = `Create a service principal.` + cmd.Long = `Create a service principal. 
- Creates a new service principal in the Databricks workspace.`, + Creates a new service principal in the Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -79,51 +99,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteServicePrincipalRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteServicePrincipalRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteServicePrincipalRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a service principal.`, - Long: `Delete a service principal. 
+ cmd.Use = "delete ID" + cmd.Short = `Delete a service principal.` + cmd.Long = `Delete a service principal. - Delete a single service principal in the Databricks workspace.`, + Delete a single service principal in the Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." - names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") - } deleteReq.Id = args[0] err = w.ServicePrincipals.Delete(ctx, deleteReq) @@ -131,52 +160,61 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetServicePrincipalRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetServicePrincipalRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetServicePrincipalRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get service principal details.`, - Long: `Get service principal details. + cmd.Use = "get ID" + cmd.Short = `Get service principal details.` + cmd.Long = `Get service principal details. Gets the details for a single service principal define in the Databricks - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." - names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") - } getReq.Id = args[0] response, err := w.ServicePrincipals.Get(ctx, getReq) @@ -184,48 +222,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListServicePrincipalsRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListServicePrincipalsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListServicePrincipalsRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, 
"sort-order", `The order to sort the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List service principals.`, - Long: `List service principals. + cmd.Use = "list" + cmd.Short = `List service principals.` + cmd.Long = `List service principals. - Gets the set of service principals associated with a Databricks workspace.`, + Gets the set of service principals associated with a Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -242,37 +302,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update service principal details.`, - Long: `Update service principal details. + cmd.Use = "patch ID" + cmd.Short = `Update service principal details.` + cmd.Long = `Update service principal details. Partially updates the details of a single service principal in the Databricks - workspace.`, + workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -282,23 +368,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." - names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") - } patchReq.Id = args[0] err = w.ServicePrincipals.Patch(ctx, patchReq) @@ -306,44 +375,73 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.ServicePrincipal -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.ServicePrincipal, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.ServicePrincipal + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.ApplicationId, "application-id", updateReq.ApplicationId, `UUID relating to the service principal.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks service principal ID.`) // TODO: array: roles -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace service principal.`, - Long: `Replace service principal. + cmd.Use = "update ID" + cmd.Short = `Replace service principal.` + cmd.Long = `Replace service principal. Updates the details of a single service principal. 
- This action replaces the existing service principal with the same name.`, + This action replaces the existing service principal with the same name.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -353,23 +451,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." - names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks service principal ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks service principal id") - } updateReq.Id = args[0] } @@ -378,10 +459,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service ServicePrincipals diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index 46c830ebc..33b0abac7 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -13,10 +13,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "serving-endpoints", - Short: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints.`, - Long: `The Serving Endpoints API allows you to create, update, and delete model +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "serving-endpoints", + Short: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints.`, + Long: `The Serving Endpoints API allows you to create, update, and delete model serving endpoints. You can use a serving endpoint to serve models from the Databricks Model @@ -29,35 +34,52 @@ var Cmd = &cobra.Command{ settings to define how requests should be routed to your served models behind an endpoint. Additionally, you can configure the scale of resources that should be applied to each served model.`, - Annotations: map[string]string{ - "package": "serving", - }, + GroupID: "serving", + Annotations: map[string]string{ + "package": "serving", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start build-logs command -var buildLogsReq serving.BuildLogsRequest -func init() { - Cmd.AddCommand(buildLogsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var buildLogsOverrides []func( + *cobra.Command, + *serving.BuildLogsRequest, +) + +func newBuildLogs() *cobra.Command { + cmd := &cobra.Command{} + + var buildLogsReq serving.BuildLogsRequest + // TODO: short flags -} - -var buildLogsCmd = &cobra.Command{ - Use: "build-logs NAME SERVED_MODEL_NAME", - Short: `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model.`, - Long: `Retrieve the logs associated with building the model's environment for a given + cmd.Use = "build-logs NAME SERVED_MODEL_NAME" + cmd.Short = `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model.` + cmd.Long = `Retrieve the logs associated with building the model's environment for a given serving endpoint's served model. - Retrieves the build logs associated with the provided served model.`, + Retrieves the build logs associated with the provided served model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -69,37 +91,57 @@ var buildLogsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range buildLogsOverrides { + fn(cmd, &buildLogsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newBuildLogs()) + }) } // start create command -var createReq serving.CreateServingEndpoint -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *serving.CreateServingEndpoint, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach NOT_UPDATING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) + var createReq serving.CreateServingEndpoint + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach NOT_UPDATING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} + cmd.Use = "create" + cmd.Short = `Create a new serving endpoint.` + cmd.Long = `Create a new serving endpoint.` -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new serving endpoint.`, 
- Long: `Create a new serving endpoint.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -130,33 +172,55 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq serving.DeleteServingEndpointRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *serving.DeleteServingEndpointRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq serving.DeleteServingEndpointRequest + // TODO: short flags -} + cmd.Use = "delete NAME" + cmd.Short = `Delete a serving endpoint.` + cmd.Long = `Delete a serving endpoint.` -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a serving endpoint.`, - Long: `Delete a serving endpoint.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -167,36 +231,58 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start export-metrics command -var exportMetricsReq serving.ExportMetricsRequest -func init() { - Cmd.AddCommand(exportMetricsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var exportMetricsOverrides []func( + *cobra.Command, + *serving.ExportMetricsRequest, +) + +func newExportMetrics() *cobra.Command { + cmd := &cobra.Command{} + + var exportMetricsReq serving.ExportMetricsRequest + // TODO: short flags -} - -var exportMetricsCmd = &cobra.Command{ - Use: "export-metrics NAME", - Short: `Retrieve the metrics associated with a serving endpoint.`, - Long: `Retrieve the metrics associated with a serving endpoint. + cmd.Use = "export-metrics NAME" + cmd.Short = `Retrieve the metrics associated with a serving endpoint.` + cmd.Long = `Retrieve the metrics associated with a serving endpoint. Retrieves the metrics associated with the provided serving endpoint in either - Prometheus or OpenMetrics exposition format.`, + Prometheus or OpenMetrics exposition format.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -207,35 +293,57 @@ var exportMetricsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range exportMetricsOverrides { + fn(cmd, &exportMetricsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportMetrics()) + }) } // start get command -var getReq serving.GetServingEndpointRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *serving.GetServingEndpointRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq serving.GetServingEndpointRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a single serving endpoint.`, - Long: `Get a single serving endpoint. + cmd.Use = "get NAME" + cmd.Short = `Get a single serving endpoint.` + cmd.Long = `Get a single serving endpoint. - Retrieves the details for a single serving endpoint.`, + Retrieves the details for a single serving endpoint.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -246,27 +354,45 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `Retrieve all serving endpoints.`, - Long: `Retrieve all serving endpoints.`, + cmd.Use = "list" + cmd.Short = `Retrieve all serving endpoints.` + cmd.Long = `Retrieve all serving endpoints.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.ServingEndpoints.ListAll(ctx) @@ -274,36 +400,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start logs command -var logsReq serving.LogsRequest -func init() { - Cmd.AddCommand(logsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var logsOverrides []func( + *cobra.Command, + *serving.LogsRequest, +) + +func newLogs() *cobra.Command { + cmd := &cobra.Command{} + + var logsReq serving.LogsRequest + // TODO: short flags -} - -var logsCmd = &cobra.Command{ - Use: "logs NAME SERVED_MODEL_NAME", - Short: `Retrieve the most recent log lines associated with a given serving endpoint's served model.`, - Long: `Retrieve the most recent log lines associated with a given serving endpoint's + cmd.Use = "logs NAME SERVED_MODEL_NAME" + cmd.Short = `Retrieve the most recent log lines associated with a given serving endpoint's served model.` + cmd.Long = `Retrieve the most recent log lines associated with a given serving endpoint's served model. - Retrieves the service logs associated with the provided served model.`, + Retrieves the service logs associated with the provided served model.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -315,33 +463,55 @@ var logsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range logsOverrides { + fn(cmd, &logsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newLogs()) + }) } // start query command -var queryReq serving.QueryRequest -func init() { - Cmd.AddCommand(queryCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var queryOverrides []func( + *cobra.Command, + *serving.QueryRequest, +) + +func newQuery() *cobra.Command { + cmd := &cobra.Command{} + + var queryReq serving.QueryRequest + // TODO: short flags -} + cmd.Use = "query NAME" + cmd.Short = `Query a serving endpoint with provided model input.` + cmd.Long = `Query a serving endpoint with provided model input.` -var queryCmd = &cobra.Command{ - Use: "query NAME", - Short: `Query a serving endpoint with provided model input.`, - Long: `Query a serving endpoint with provided model input.`, + cmd.Annotations = make(map[string]string) - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -352,44 +522,64 @@ var queryCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range queryOverrides { + fn(cmd, &queryReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newQuery()) + }) } // start update-config command -var updateConfigReq serving.EndpointCoreConfigInput -var updateConfigJson flags.JsonFlag -var updateConfigSkipWait bool -var updateConfigTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateConfigOverrides []func( + *cobra.Command, + *serving.EndpointCoreConfigInput, +) -func init() { - Cmd.AddCommand(updateConfigCmd) +func newUpdateConfig() *cobra.Command { + cmd := &cobra.Command{} - updateConfigCmd.Flags().BoolVar(&updateConfigSkipWait, "no-wait", updateConfigSkipWait, `do not wait to reach NOT_UPDATING state`) - updateConfigCmd.Flags().DurationVar(&updateConfigTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) + var updateConfigReq serving.EndpointCoreConfigInput + var updateConfigJson flags.JsonFlag + + var updateConfigSkipWait bool + var updateConfigTimeout time.Duration + + cmd.Flags().BoolVar(&updateConfigSkipWait, "no-wait", updateConfigSkipWait, `do not wait to reach NOT_UPDATING state`) + cmd.Flags().DurationVar(&updateConfigTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach NOT_UPDATING state`) // TODO: short flags - updateConfigCmd.Flags().Var(&updateConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: traffic_config -} - -var updateConfigCmd = &cobra.Command{ - Use: "update-config", - Short: `Update a serving endpoint with a new config.`, - Long: `Update a serving endpoint with a new config. 
+ cmd.Use = "update-config" + cmd.Short = `Update a serving endpoint with a new config.` + cmd.Long = `Update a serving endpoint with a new config. Updates any combination of the serving endpoint's served models, the compute configuration of those served models, and the endpoint's traffic config. An endpoint that already has an update in progress can not be updated until the - current update completes or fails.`, + current update completes or fails.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -420,10 +610,24 @@ var updateConfigCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateConfigOverrides { + fn(cmd, &updateConfigReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateConfig()) + }) } // end service ServingEndpoints diff --git a/cmd/workspace/shares/shares.go b/cmd/workspace/shares/shares.go index 2580b060e..7643567a9 100755 --- a/cmd/workspace/shares/shares.go +++ b/cmd/workspace/shares/shares.go @@ -10,47 +10,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "shares", - Short: `Databricks Shares REST API.`, - Long: `Databricks Shares REST API`, - Annotations: map[string]string{ - "package": "sharing", - }, +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "shares", + Short: `Databricks Shares REST API.`, + Long: `Databricks Shares REST API`, + GroupID: "sharing", + Annotations: map[string]string{ + "package": "sharing", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sharing.CreateShare -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sharing.CreateShare, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sharing.CreateShare + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a share.`, - Long: `Create a share. + cmd.Use = "create NAME" + cmd.Short = `Create a share.` + cmd.Long = `Create a share. Creates a new share for data objects. Data objects can be added after creation with **update**. 
The caller must be a metastore admin or have the - **CREATE_SHARE** privilege on the metastore.`, + **CREATE_SHARE** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -68,36 +90,58 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sharing.DeleteShareRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sharing.DeleteShareRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sharing.DeleteShareRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a share.`, - Long: `Delete a share. + cmd.Use = "delete NAME" + cmd.Short = `Delete a share.` + cmd.Long = `Delete a share. 
Deletes a data object share from the metastore. The caller must be an owner of - the share.`, + the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -108,38 +152,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq sharing.GetShareRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *sharing.GetShareRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq sharing.GetShareRequest + // TODO: short flags - getCmd.Flags().BoolVar(&getReq.IncludeSharedData, "include-shared-data", getReq.IncludeSharedData, `Query for data to include in the share.`) + cmd.Flags().BoolVar(&getReq.IncludeSharedData, "include-shared-data", getReq.IncludeSharedData, `Query for data to include in the share.`) -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a share.`, - Long: `Get a share. + cmd.Use = "get NAME" + cmd.Short = `Get a share.` + cmd.Long = `Get a share. Gets a data object share from the metastore. The caller must be a metastore - admin or the owner of the share.`, + admin or the owner of the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -150,31 +216,49 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List shares.`, - Long: `List shares. + cmd.Use = "list" + cmd.Short = `List shares.` + cmd.Long = `List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific - ordering of the elements in the array.`, + ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Shares.ListAll(ctx) @@ -182,36 +266,58 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start share-permissions command -var sharePermissionsReq sharing.SharePermissionsRequest -func init() { - Cmd.AddCommand(sharePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var sharePermissionsOverrides []func( + *cobra.Command, + *sharing.SharePermissionsRequest, +) + +func newSharePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var sharePermissionsReq sharing.SharePermissionsRequest + // TODO: short flags -} - -var sharePermissionsCmd = &cobra.Command{ - Use: "share-permissions NAME", - Short: `Get permissions.`, - Long: `Get permissions. + cmd.Use = "share-permissions NAME" + cmd.Short = `Get permissions.` + cmd.Long = `Get permissions. Gets the permissions for a data share from the metastore. The caller must be a - metastore admin or the owner of the share.`, + metastore admin or the owner of the share.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -222,32 +328,52 @@ var sharePermissionsCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range sharePermissionsOverrides { + fn(cmd, &sharePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSharePermissions()) + }) } // start update command -var updateReq sharing.UpdateShare -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *sharing.UpdateShare, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sharing.UpdateShare + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the share.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of share.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the share.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of share.`) // TODO: array: updates -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a share.`, - Long: `Update a share. + cmd.Use = "update NAME" + cmd.Short = `Update a share.` + cmd.Long = `Update a share. 
Updates the share with the changes and data objects in the request. The caller must be the owner of the share or a metastore admin. @@ -262,18 +388,20 @@ var updateCmd = &cobra.Command{ indefinitely for recipients to be able to access the table. Typically, you should use a group as the share owner. - Table removals through **update** do not require additional privileges.`, + Table removals through **update** do not require additional privileges.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -291,43 +419,65 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start update-permissions command -var updatePermissionsReq sharing.UpdateSharePermissions -var updatePermissionsJson flags.JsonFlag -func init() { - Cmd.AddCommand(updatePermissionsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *sharing.UpdateSharePermissions, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq sharing.UpdateSharePermissions + var updatePermissionsJson flags.JsonFlag + // TODO: short flags - updatePermissionsCmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: changes -} - -var updatePermissionsCmd = &cobra.Command{ - Use: "update-permissions NAME", - Short: `Update permissions.`, - Long: `Update permissions. + cmd.Use = "update-permissions NAME" + cmd.Short = `Update permissions.` + cmd.Long = `Update permissions. Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an owner of the share. For new recipient grants, the user must also be the owner of the recipients. - recipient revocations do not require additional privileges.`, + recipient revocations do not require additional privileges.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -344,10 +494,24 @@ var updatePermissionsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) } // end service Shares diff --git a/cmd/workspace/storage-credentials/overrides.go b/cmd/workspace/storage-credentials/overrides.go index 8bce9ffa2..37c18ca6c 100644 --- a/cmd/workspace/storage-credentials/overrides.go +++ b/cmd/workspace/storage-credentials/overrides.go @@ -1,10 +1,17 @@ package storage_credentials -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Credentials"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{if .AwsIamRole}}{{.AwsIamRole.RoleArn}}{{end}}{{if .AzureServicePrincipal}}{{.AzureServicePrincipal.ApplicationId}}{{end}}{{if .GcpServiceAccountKey}}{{.Email}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/storage-credentials/storage-credentials.go b/cmd/workspace/storage-credentials/storage-credentials.go index bbd7dd581..337fddcfe 100755 --- a/cmd/workspace/storage-credentials/storage-credentials.go +++ b/cmd/workspace/storage-credentials/storage-credentials.go @@ -3,8 +3,6 @@ package storage_credentials import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "storage-credentials", - Short: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud 
tenant.`, - Long: `A storage credential represents an authentication and authorization mechanism +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "storage-credentials", + Short: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant.`, + Long: `A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that control which users and groups can access the credential. If a user does not have access to a storage @@ -28,34 +31,49 @@ var Cmd = &cobra.Command{ To create storage credentials, you must be a Databricks account admin. The account admin who creates the storage credential can delegate ownership to another user or group to manage permissions on it.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateStorageCredential -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateStorageCredential, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateStorageCredential + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Comment associated with the credential.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Comment associated with the credential.`) // TODO: output-only field - createCmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) - createCmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Supplying true to this argument skips validation of the created credential.`) + cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Supplying true to this argument skips validation of the created credential.`) -} - -var createCmd = &cobra.Command{ - Use: "create NAME", - Short: `Create a storage credential.`, - Long: `Create a storage credential. + cmd.Use = "create NAME" + cmd.Short = `Create a storage credential.` + cmd.Long = `Create a storage credential. Creates a new storage credential. The request object is specific to the cloud: @@ -64,18 +82,20 @@ var createCmd = &cobra.Command{ **DatabricksGcpServiceAccount** for GCP managed credentials. 
The caller must be a metastore admin and have the - **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.`, + **CREATE_STORAGE_CREDENTIAL** privilege on the metastore.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -93,54 +113,63 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteStorageCredentialRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteStorageCredentialRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteStorageCredentialRequest + // TODO: short flags - deleteCmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external locations or external tables.`) + cmd.Flags().BoolVar(&deleteReq.Force, "force", deleteReq.Force, `Force deletion even if there are dependent external locations or external tables.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete NAME", - Short: `Delete a credential.`, - Long: `Delete a credential. + cmd.Use = "delete NAME" + cmd.Short = `Delete a credential.` + cmd.Long = `Delete a credential. Deletes a storage credential from the metastore. The caller must be an owner - of the storage credential.`, + of the storage credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." - names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the storage credential") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the storage credential") - } deleteReq.Name = args[0] err = w.StorageCredentials.Delete(ctx, deleteReq) @@ -148,53 +177,62 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetStorageCredentialRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetStorageCredentialRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetStorageCredentialRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get a credential.`, - Long: `Get a credential. + cmd.Use = "get NAME" + cmd.Short = `Get a credential.` + cmd.Long = `Get a credential. Gets a storage credential from the metastore. 
The caller must be a metastore admin, the owner of the storage credential, or have some permission on the - storage credential.`, + storage credential.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." - names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of the storage credential") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of the storage credential") - } getReq.Name = args[0] response, err := w.StorageCredentials.Get(ctx, getReq) @@ -202,33 +240,51 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List credentials.`, - Long: `List credentials. + cmd.Use = "list" + cmd.Short = `List credentials.` + cmd.Long = `List credentials. Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to only those storage credentials the caller has permission to access. If the caller is a metastore admin, all storage credentials will be retrieved. There is no guarantee of a specific ordering of - the elements in the array.`, + the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.StorageCredentials.ListAll(ctx) @@ -236,46 +292,75 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start update command -var updateReq catalog.UpdateStorageCredential -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateStorageCredential, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateStorageCredential + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `Comment associated with the credential.`) // TODO: output-only field - updateCmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The credential name.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`) - updateCmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) - updateCmd.Flags().BoolVar(&updateReq.SkipValidation, "skip-validation", updateReq.SkipValidation, `Supplying true 
to this argument skips validation of the updated credential.`) + cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if there are dependent external locations or external tables.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The credential name.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of credential.`) + cmd.Flags().BoolVar(&updateReq.ReadOnly, "read-only", updateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().BoolVar(&updateReq.SkipValidation, "skip-validation", updateReq.SkipValidation, `Supplying true to this argument skips validation of the updated credential.`) -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update a credential.`, - Long: `Update a credential. + cmd.Use = "update NAME" + cmd.Short = `Update a credential.` + cmd.Long = `Update a credential. Updates a storage credential on the metastore. The caller must be the owner of the storage credential or a metastore admin. If the caller is a metastore - admin, only the __owner__ credential can be changed.`, + admin, only the __owner__ credential can be changed.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -285,23 +370,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." 
- names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The credential name") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the credential name") - } updateReq.Name = args[0] } @@ -310,36 +378,56 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // start validate command -var validateReq catalog.ValidateStorageCredential -var validateJson flags.JsonFlag -func init() { - Cmd.AddCommand(validateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var validateOverrides []func( + *cobra.Command, + *catalog.ValidateStorageCredential, +) + +func newValidate() *cobra.Command { + cmd := &cobra.Command{} + + var validateReq catalog.ValidateStorageCredential + var validateJson flags.JsonFlag + // TODO: short flags - validateCmd.Flags().Var(&validateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&validateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: aws_iam_role // TODO: complex arg: azure_managed_identity // TODO: complex arg: azure_service_principal // TODO: output-only field - validateCmd.Flags().StringVar(&validateReq.ExternalLocationName, "external-location-name", validateReq.ExternalLocationName, `The name of an existing external location to validate.`) - validateCmd.Flags().BoolVar(&validateReq.ReadOnly, "read-only", validateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) + cmd.Flags().StringVar(&validateReq.ExternalLocationName, "external-location-name", validateReq.ExternalLocationName, `The name of an existing external location to validate.`) + cmd.Flags().BoolVar(&validateReq.ReadOnly, "read-only", validateReq.ReadOnly, `Whether the storage credential is only usable for read operations.`) // TODO: any: storage_credential_name - validateCmd.Flags().StringVar(&validateReq.Url, "url", validateReq.Url, `The external location url to validate.`) + cmd.Flags().StringVar(&validateReq.Url, "url", validateReq.Url, `The external location url to validate.`) -} - -var validateCmd = &cobra.Command{ - Use: "validate", - Short: `Validate a storage credential.`, - Long: `Validate a storage credential. + cmd.Use = "validate" + cmd.Short = `Validate a storage credential.` + cmd.Long = `Validate a storage credential. Validates a storage credential. At least one of __external_location_name__ and __url__ need to be provided. 
If only one of them is provided, it will be used @@ -352,18 +440,20 @@ var validateCmd = &cobra.Command{ The caller must be a metastore admin or the storage credential owner or have the **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage - credential.`, + credential.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -380,10 +470,24 @@ var validateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range validateOverrides { + fn(cmd, &validateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newValidate()) + }) } // end service StorageCredentials diff --git a/cmd/workspace/system-schemas/system-schemas.go b/cmd/workspace/system-schemas/system-schemas.go index fed5e5e52..2dd729f1b 100755 --- a/cmd/workspace/system-schemas/system-schemas.go +++ b/cmd/workspace/system-schemas/system-schemas.go @@ -11,44 +11,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "system-schemas", - Short: `A system schema is a schema that lives within the system catalog.`, - Long: `A system schema is a schema that lives within the system catalog. A system +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "system-schemas", + Short: `A system schema is a schema that lives within the system catalog.`, + Long: `A system schema is a schema that lives within the system catalog. A system schema may contain information about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, - // This service is being previewed; hide from help output. - Hidden: true, + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start disable command -var disableReq catalog.DisableRequest -func init() { - Cmd.AddCommand(disableCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var disableOverrides []func( + *cobra.Command, + *catalog.DisableRequest, +) + +func newDisable() *cobra.Command { + cmd := &cobra.Command{} + + var disableReq catalog.DisableRequest + // TODO: short flags -} - -var disableCmd = &cobra.Command{ - Use: "disable METASTORE_ID SCHEMA_NAME", - Short: `Disable a system schema.`, - Long: `Disable a system schema. + cmd.Use = "disable METASTORE_ID SCHEMA_NAME" + cmd.Short = `Disable a system schema.` + cmd.Long = `Disable a system schema. Disables the system schema and removes it from the system catalog. The caller - must be an account admin or a metastore admin.`, + must be an account admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -63,36 +85,58 @@ var disableCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range disableOverrides { + fn(cmd, &disableReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDisable()) + }) } // start enable command -var enableReq catalog.EnableRequest -func init() { - Cmd.AddCommand(enableCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var enableOverrides []func( + *cobra.Command, + *catalog.EnableRequest, +) + +func newEnable() *cobra.Command { + cmd := &cobra.Command{} + + var enableReq catalog.EnableRequest + // TODO: short flags -} - -var enableCmd = &cobra.Command{ - Use: "enable METASTORE_ID SCHEMA_NAME", - Short: `Enable a system schema.`, - Long: `Enable a system schema. + cmd.Use = "enable METASTORE_ID SCHEMA_NAME" + cmd.Short = `Enable a system schema.` + cmd.Long = `Enable a system schema. Enables the system schema and adds it to the system catalog. The caller must - be an account admin or a metastore admin.`, + be an account admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -107,36 +151,58 @@ var enableCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range enableOverrides { + fn(cmd, &enableReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEnable()) + }) } // start list command -var listReq catalog.ListSystemSchemasRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListSystemSchemasRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListSystemSchemasRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list METASTORE_ID", - Short: `List system schemas.`, - Long: `List system schemas. + cmd.Use = "list METASTORE_ID" + cmd.Short = `List system schemas.` + cmd.Long = `List system schemas. Gets an array of system schemas for a metastore. The caller must be an account - admin or a metastore admin.`, + admin or a metastore admin.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -147,10 +213,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service SystemSchemas diff --git a/cmd/workspace/table-constraints/table-constraints.go b/cmd/workspace/table-constraints/table-constraints.go index d9588b8fc..023846a65 100755 --- a/cmd/workspace/table-constraints/table-constraints.go +++ b/cmd/workspace/table-constraints/table-constraints.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "table-constraints", - Short: `Primary key and foreign key constraints encode relationships between fields in tables.`, - Long: `Primary key and foreign key constraints encode relationships between fields in +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "table-constraints", + Short: `Primary key and foreign key constraints encode relationships between fields in tables.`, + Long: `Primary key and foreign key constraints encode relationships between fields in tables. Primary and foreign keys are informational only and are not enforced. Foreign @@ -28,26 +33,41 @@ var Cmd = &cobra.Command{ You can declare primary keys and foreign keys as part of the table specification during table creation. You can also add or drop constraints on existing tables.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateTableConstraint -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateTableConstraint, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateTableConstraint + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a table constraint.`, - Long: `Create a table constraint. + cmd.Use = "create" + cmd.Short = `Create a table constraint.` + cmd.Long = `Create a table constraint. Creates a new table constraint. @@ -58,11 +78,12 @@ var createCmd = &cobra.Command{ __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the owner of the referenced parent - table.`, + table.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -80,25 +101,45 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteTableConstraintRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteTableConstraintRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteTableConstraintRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME CONSTRAINT_NAME CASCADE", - Short: `Delete a table constraint.`, - Long: `Delete a table constraint. + cmd.Use = "delete FULL_NAME CONSTRAINT_NAME CASCADE" + cmd.Short = `Delete a table constraint.` + cmd.Long = `Delete a table constraint. Deletes a table constraint. @@ -108,15 +149,17 @@ var deleteCmd = &cobra.Command{ schema, and be the owner of the table. 
- if __cascade__ argument is **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG** privilege on the table's catalog, the **USE_SCHEMA** privilege - on the table's schema, and be the owner of the table.`, + on the table's schema, and be the owner of the table.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(3) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -132,10 +175,24 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // end service TableConstraints diff --git a/cmd/workspace/tables/overrides.go b/cmd/workspace/tables/overrides.go index ed9c86ed5..35fc351a4 100644 --- a/cmd/workspace/tables/overrides.go +++ b/cmd/workspace/tables/overrides.go @@ -1,10 +1,18 @@ package tables -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *catalog.ListTablesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "Full Name"}} {{header "Table Type"}} {{range .}}{{.FullName|green}} {{blue "%s" .TableType}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/tables/tables.go b/cmd/workspace/tables/tables.go index d57b72f1b..b7b45de46 100755 --- a/cmd/workspace/tables/tables.go +++ b/cmd/workspace/tables/tables.go @@ -3,18 +3,21 @@ package tables import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/databricks-sdk-go/service/catalog" "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "tables", - Short: `A table resides in the third layer of Unity Catalog’s three-level namespace.`, - Long: `A table resides in the third layer of Unity Catalog’s three-level namespace. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "tables", + Short: `A table resides in the third layer of Unity Catalog’s three-level namespace.`, + Long: `A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data. To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT @@ -23,54 +26,58 @@ var Cmd = &cobra.Command{ A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table (rather than a managed or external table).`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq catalog.DeleteTableRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteTableRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteTableRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME", - Short: `Delete a table.`, - Long: `Delete a table. + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a table.` + cmd.Long = `Delete a table. Deletes a table from the specified parent catalog and schema. 
The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the - **USE_SCHEMA** privilege on the parent schema.`, + **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } deleteReq.FullName = args[0] err = w.Tables.Delete(ctx, deleteReq) @@ -78,57 +85,66 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq catalog.GetTableRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetTableRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetTableRequest + // TODO: short flags - getCmd.Flags().BoolVar(&getReq.IncludeDeltaMetadata, "include-delta-metadata", getReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) + cmd.Flags().BoolVar(&getReq.IncludeDeltaMetadata, "include-delta-metadata", getReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) -} - -var getCmd = &cobra.Command{ - Use: "get FULL_NAME", - Short: `Get a table.`, - Long: `Get a table. + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a table.` + cmd.Long = `Get a table. Gets a table from the metastore for a specific catalog and schema. 
The caller must be a metastore admin, be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, or be the owner of the table and have the - **SELECT** privilege on it as well.`, + **SELECT** privilege on it as well.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } getReq.FullName = args[0] response, err := w.Tables.Get(ctx, getReq) @@ -136,44 +152,66 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq catalog.ListTablesRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListTablesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListTablesRequest + // TODO: short flags - listCmd.Flags().BoolVar(&listReq.IncludeDeltaMetadata, "include-delta-metadata", listReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) - listCmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of tables to return (page length).`) - listCmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().BoolVar(&listReq.IncludeDeltaMetadata, "include-delta-metadata", listReq.IncludeDeltaMetadata, `Whether delta metadata should be included in the response.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Maximum number of tables to return (page length).`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List tables.`, - Long: `List tables. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List tables.` + cmd.Long = `List tables. Gets an array of all tables for the current metastore under the parent catalog and schema. The caller must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. 
For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a - specific ordering of the elements in the array.`, + specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -185,30 +223,50 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start list-summaries command -var listSummariesReq catalog.ListSummariesRequest -func init() { - Cmd.AddCommand(listSummariesCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listSummariesOverrides []func( + *cobra.Command, + *catalog.ListSummariesRequest, +) + +func newListSummaries() *cobra.Command { + cmd := &cobra.Command{} + + var listSummariesReq catalog.ListSummariesRequest + // TODO: short flags - listSummariesCmd.Flags().IntVar(&listSummariesReq.MaxResults, "max-results", listSummariesReq.MaxResults, `Maximum number of tables to return (page length).`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.PageToken, "page-token", listSummariesReq.PageToken, `Opaque token to send for the next page of results (pagination).`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.SchemaNamePattern, "schema-name-pattern", listSummariesReq.SchemaNamePattern, `A sql LIKE pattern (% and _) for schema names.`) - listSummariesCmd.Flags().StringVar(&listSummariesReq.TableNamePattern, "table-name-pattern", listSummariesReq.TableNamePattern, `A sql LIKE pattern (% and _) for table names.`) + cmd.Flags().IntVar(&listSummariesReq.MaxResults, "max-results", listSummariesReq.MaxResults, `Maximum number of tables to return (page length).`) + cmd.Flags().StringVar(&listSummariesReq.PageToken, "page-token", listSummariesReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().StringVar(&listSummariesReq.SchemaNamePattern, "schema-name-pattern", listSummariesReq.SchemaNamePattern, `A sql LIKE pattern (% and _) for schema names.`) + cmd.Flags().StringVar(&listSummariesReq.TableNamePattern, "table-name-pattern", listSummariesReq.TableNamePattern, `A sql LIKE pattern (% and _) for table names.`) -} - -var listSummariesCmd = &cobra.Command{ - Use: "list-summaries CATALOG_NAME", - Short: `List table summaries.`, - Long: `List table summaries. + cmd.Use = "list-summaries CATALOG_NAME" + cmd.Short = `List table summaries.` + cmd.Long = `List table summaries. Gets an array of summaries for tables for a schema and catalog within the metastore. 
The table summaries returned are either: @@ -220,31 +278,20 @@ var listSummariesCmd = &cobra.Command{ or **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the **USE_CATALOG** privilege on the parent catalog. - There is no guarantee of a specific ordering of the elements in the array.`, + There is no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No CATALOG_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Name of parent catalog for tables of interest") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have name of parent catalog for tables of interest") - } listSummariesReq.CatalogName = args[0] response, err := w.Tables.ListSummariesAll(ctx, listSummariesReq) @@ -252,60 +299,69 @@ var listSummariesCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listSummariesOverrides { + fn(cmd, &listSummariesReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newListSummaries()) + }) } // start update command -var updateReq catalog.UpdateTableRequest -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateTableRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateTableRequest + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, ``) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, ``) -} - -var updateCmd = &cobra.Command{ - Use: "update FULL_NAME", - Short: `Update a table owner.`, - Long: `Update a table owner. + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a table owner.` + cmd.Long = `Update a table owner. Change the owner of the table. The caller must be the owner of the parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** - privilege on the parent schema.`, + privilege on the parent schema.` // This command is being previewed; hide from help output. 
- Hidden: true, + cmd.Hidden = true - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." - names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Full name of the table") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have full name of the table") - } updateReq.FullName = args[0] err = w.Tables.Update(ctx, updateReq) @@ -313,10 +369,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Tables diff --git a/cmd/workspace/token-management/overrides.go b/cmd/workspace/token-management/overrides.go index 2070e2a2b..46967d37a 100644 --- a/cmd/workspace/token-management/overrides.go +++ b/cmd/workspace/token-management/overrides.go @@ -1,10 +1,18 @@ package token_management -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *settings.ListTokenManagementRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Created By"}} {{header "Comment"}} {{range .}}{{.TokenId|green}} {{.CreatedByUsername|cyan}} {{.Comment|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/token-management/token-management.go b/cmd/workspace/token-management/token-management.go index b5cc542c1..afd8fdb9e 100755 --- a/cmd/workspace/token-management/token-management.go +++ b/cmd/workspace/token-management/token-management.go @@ -12,47 +12,69 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "token-management", - Short: `Enables administrators to get all tokens and delete tokens for other users.`, - Long: `Enables administrators to get all tokens and delete tokens for other users. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "token-management", + Short: `Enables administrators to get all tokens and delete tokens for other users.`, + Long: `Enables administrators to get all tokens and delete tokens for other users. Admins can either get every token, get a specific token by ID, or get all tokens for a particular user.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create-obo-token command -var createOboTokenReq settings.CreateOboTokenRequest -var createOboTokenJson flags.JsonFlag -func init() { - Cmd.AddCommand(createOboTokenCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOboTokenOverrides []func( + *cobra.Command, + *settings.CreateOboTokenRequest, +) + +func newCreateOboToken() *cobra.Command { + cmd := &cobra.Command{} + + var createOboTokenReq settings.CreateOboTokenRequest + var createOboTokenJson flags.JsonFlag + // TODO: short flags - createOboTokenCmd.Flags().Var(&createOboTokenJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createOboTokenJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createOboTokenCmd.Flags().StringVar(&createOboTokenReq.Comment, "comment", createOboTokenReq.Comment, `Comment that describes the purpose of the token.`) + cmd.Flags().StringVar(&createOboTokenReq.Comment, "comment", createOboTokenReq.Comment, `Comment that describes the purpose of the token.`) -} - -var createOboTokenCmd = &cobra.Command{ - Use: "create-obo-token APPLICATION_ID LIFETIME_SECONDS", - Short: `Create on-behalf token.`, - Long: `Create on-behalf token. + cmd.Use = "create-obo-token APPLICATION_ID LIFETIME_SECONDS" + cmd.Short = `Create on-behalf token.` + cmd.Long = `Create on-behalf token. 
- Creates a token on behalf of a service principal.`, + Creates a token on behalf of a service principal.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -74,51 +96,60 @@ var createOboTokenCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOboTokenOverrides { + fn(cmd, &createOboTokenReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreateOboToken()) + }) } // start delete command -var deleteReq settings.DeleteTokenManagementRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *settings.DeleteTokenManagementRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.DeleteTokenManagementRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete TOKEN_ID", - Short: `Delete a token.`, - Long: `Delete a token. 
+ cmd.Use = "delete TOKEN_ID" + cmd.Short = `Delete a token.` + cmd.Long = `Delete a token. - Deletes a token, specified by its ID.`, + Deletes a token, specified by its ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No TOKEN_ID argument specified. Loading names for Token Management drop-down." - names, err := w.TokenManagement.TokenInfoCommentToTokenIdMap(ctx, settings.ListTokenManagementRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Token Management drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the token to get") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the token to get") - } deleteReq.TokenId = args[0] err = w.TokenManagement.Delete(ctx, deleteReq) @@ -126,51 +157,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq settings.GetTokenManagementRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *settings.GetTokenManagementRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq settings.GetTokenManagementRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get TOKEN_ID", - Short: `Get token info.`, - Long: `Get token info. + cmd.Use = "get TOKEN_ID" + cmd.Short = `Get token info.` + cmd.Long = `Get token info. - Gets information about a token, specified by its ID.`, + Gets information about a token, specified by its ID.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No TOKEN_ID argument specified. Loading names for Token Management drop-down." - names, err := w.TokenManagement.TokenInfoCommentToTokenIdMap(ctx, settings.ListTokenManagementRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Token Management drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the token to get") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the token to get") - } getReq.TokenId = args[0] response, err := w.TokenManagement.Get(ctx, getReq) @@ -178,43 +218,65 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq settings.ListTokenManagementRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *settings.ListTokenManagementRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq settings.ListTokenManagementRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.CreatedById, "created-by-id", listReq.CreatedById, `User ID of the user that created the token.`) - listCmd.Flags().StringVar(&listReq.CreatedByUsername, "created-by-username", listReq.CreatedByUsername, `Username of the user that created the token.`) + cmd.Flags().StringVar(&listReq.CreatedById, "created-by-id", listReq.CreatedById, `User ID of the user that created the token.`) + cmd.Flags().StringVar(&listReq.CreatedByUsername, "created-by-username", listReq.CreatedByUsername, `Username of the user that created the token.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List all tokens.`, - Long: `List all tokens. + cmd.Use = "list" + cmd.Short = `List all tokens.` + cmd.Long = `List all tokens. 
- Lists all tokens associated with the specified workspace or user.`, + Lists all tokens associated with the specified workspace or user.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -231,10 +293,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service TokenManagement diff --git a/cmd/workspace/tokens/overrides.go b/cmd/workspace/tokens/overrides.go index b5673c0e9..09c51758e 100644 --- a/cmd/workspace/tokens/overrides.go +++ b/cmd/workspace/tokens/overrides.go @@ -1,10 +1,17 @@ package tokens -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Expiry time"}} {{header "Comment"}} {{range .}}{{.TokenId|green}} {{cyan "%d" .ExpiryTime}} {{.Comment|cyan}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/tokens/tokens.go b/cmd/workspace/tokens/tokens.go index c121793b6..1e6ea7141 100755 --- a/cmd/workspace/tokens/tokens.go +++ b/cmd/workspace/tokens/tokens.go @@ -3,8 +3,6 @@ package tokens import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,50 +10,72 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "tokens", - Short: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, - Long: `The Token API allows you to create, list, and revoke tokens that can be used +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "tokens", + Short: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, + Long: `The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs.`, - Annotations: map[string]string{ - "package": "settings", - }, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq settings.CreateTokenRequest -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *settings.CreateTokenRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq settings.CreateTokenRequest + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Optional description to attach to the token.`) - createCmd.Flags().Int64Var(&createReq.LifetimeSeconds, "lifetime-seconds", createReq.LifetimeSeconds, `The lifetime of the token, in seconds.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `Optional description to attach to the token.`) + cmd.Flags().Int64Var(&createReq.LifetimeSeconds, "lifetime-seconds", createReq.LifetimeSeconds, `The lifetime of the token, in seconds.`) -} - -var createCmd = 
&cobra.Command{ - Use: "create", - Short: `Create a user token.`, - Long: `Create a user token. + cmd.Use = "create" + cmd.Short = `Create a user token.` + cmd.Long = `Create a user token. Creates and returns a token for a user. If this call is made through token authentication, it creates a token with the same client ID as the authenticated token. If the user's token quota is exceeded, this call returns - an error **QUOTA_EXCEEDED**.`, + an error **QUOTA_EXCEEDED**.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -72,36 +92,65 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq settings.RevokeTokenRequest -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *settings.RevokeTokenRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq settings.RevokeTokenRequest + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete TOKEN_ID", - Short: `Revoke token.`, - Long: `Revoke token. + cmd.Use = "delete TOKEN_ID" + cmd.Short = `Revoke token.` + cmd.Long = `Revoke token. Revokes an access token. If a token with the specified ID is not valid, this call returns an error - **RESOURCE_DOES_NOT_EXIST**.`, + **RESOURCE_DOES_NOT_EXIST**.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -111,23 +160,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No TOKEN_ID argument specified. Loading names for Tokens drop-down." - names, err := w.Tokens.TokenInfoCommentToTokenIdMap(ctx) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Tokens drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The ID of the token to be revoked") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the id of the token to be revoked") - } deleteReq.TokenId = args[0] } @@ -136,29 +168,47 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, +) -} +func newList() *cobra.Command { + cmd := &cobra.Command{} -var listCmd = &cobra.Command{ - Use: "list", - Short: `List tokens.`, - Long: `List tokens. + cmd.Use = "list" + cmd.Short = `List tokens.` + cmd.Long = `List tokens. 
- Lists all the valid tokens for a user-workspace pair.`, + Lists all the valid tokens for a user-workspace pair.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Tokens.ListAll(ctx) @@ -166,10 +216,24 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // end service Tokens diff --git a/cmd/workspace/users/overrides.go b/cmd/workspace/users/overrides.go index 45447a0ae..a985ccf8c 100644 --- a/cmd/workspace/users/overrides.go +++ b/cmd/workspace/users/overrides.go @@ -1,10 +1,18 @@ package users -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *iam.ListUsersRequest) { listReq.Attributes = "id,userName,groups,active" listCmd.Annotations["template"] = cmdio.Heredoc(` {{range .}}{{.Id|green}} {{.UserName}} {{range .Groups}}{{.Display}} {{end}} {{if .Active}}{{"ACTIVE"|green}}{{else}}DISABLED{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/users/users.go b/cmd/workspace/users/users.go index 71fdcf9ed..e81beb02f 
100755 --- a/cmd/workspace/users/users.go +++ b/cmd/workspace/users/users.go @@ -3,8 +3,6 @@ package users import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,10 +10,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "users", - Short: `User identities recognized by Databricks and represented by email addresses.`, - Long: `User identities recognized by Databricks and represented by email addresses. +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "users", + Short: `User identities recognized by Databricks and represented by email addresses.`, + Long: `User identities recognized by Databricks and represented by email addresses. Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity provider to your Databricks workspace. SCIM @@ -26,51 +29,68 @@ var Cmd = &cobra.Command{ provider and that user’s account will also be removed from Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from accessing sensitive data.`, - Annotations: map[string]string{ - "package": "iam", - }, + GroupID: "iam", + Annotations: map[string]string{ + "package": "iam", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq iam.User -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *iam.User, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq iam.User + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) - createCmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&createReq.Active, "active", createReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&createReq.DisplayName, "display-name", createReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - createCmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) + cmd.Flags().StringVar(&createReq.ExternalId, "external-id", createReq.ExternalId, ``) // TODO: array: groups - createCmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&createReq.Id, "id", createReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - createCmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&createReq.UserName, "user-name", createReq.UserName, `Email address of the Databricks user.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a new user.`, - Long: `Create a new user. + cmd.Use = "create" + cmd.Short = `Create a new user.` + cmd.Long = `Create a new user. Creates a new user in the Databricks workspace. 
This new user will also be - added to the Databricks account.`, + added to the Databricks account.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -87,52 +107,61 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq iam.DeleteUserRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *iam.DeleteUserRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq iam.DeleteUserRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a user.`, - Long: `Delete a user. + cmd.Use = "delete ID" + cmd.Short = `Delete a user.` + cmd.Long = `Delete a user. Deletes a user. 
Deleting a user from a Databricks workspace also removes - objects associated with the user.`, + objects associated with the user.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Users drop-down." - names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks workspace") - } deleteReq.Id = args[0] err = w.Users.Delete(ctx, deleteReq) @@ -140,51 +169,60 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start get command -var getReq iam.GetUserRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *iam.GetUserRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq iam.GetUserRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get user details.`, - Long: `Get user details. + cmd.Use = "get ID" + cmd.Short = `Get user details.` + cmd.Long = `Get user details. - Gets information for a specific user in Databricks workspace.`, + Gets information for a specific user in Databricks workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Users drop-down." - names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks workspace") - } getReq.Id = args[0] response, err := w.Users.Get(ctx, getReq) @@ -192,48 +230,70 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start list command -var listReq iam.ListUsersRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *iam.ListUsersRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq iam.ListUsersRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) - listCmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) - listCmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) - listCmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) - listCmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) - listCmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort the results.`) - listCmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) + cmd.Flags().StringVar(&listReq.Attributes, "attributes", listReq.Attributes, `Comma-separated list of attributes to return in response.`) + cmd.Flags().IntVar(&listReq.Count, "count", listReq.Count, `Desired number of results per page.`) + cmd.Flags().StringVar(&listReq.ExcludedAttributes, "excluded-attributes", listReq.ExcludedAttributes, `Comma-separated list of attributes to exclude in response.`) + cmd.Flags().StringVar(&listReq.Filter, "filter", listReq.Filter, `Query by which the results have to be filtered.`) + cmd.Flags().StringVar(&listReq.SortBy, "sort-by", listReq.SortBy, `Attribute to sort the results.`) + cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order to sort 
the results.`) + cmd.Flags().IntVar(&listReq.StartIndex, "start-index", listReq.StartIndex, `Specifies the index of the first result.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List users.`, - Long: `List users. + cmd.Use = "list" + cmd.Short = `List users.` + cmd.Long = `List users. - Gets details for all the users associated with a Databricks workspace.`, + Gets details for all the users associated with a Databricks workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -250,37 +310,63 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start patch command -var patchReq iam.PartialUpdate -var patchJson flags.JsonFlag -func init() { - Cmd.AddCommand(patchCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var patchOverrides []func( + *cobra.Command, + *iam.PartialUpdate, +) + +func newPatch() *cobra.Command { + cmd := &cobra.Command{} + + var patchReq iam.PartialUpdate + var patchJson flags.JsonFlag + // TODO: short flags - patchCmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations // TODO: array: schema -} - -var patchCmd = &cobra.Command{ - Use: "patch ID", - Short: `Update user details.`, - Long: `Update user details. + cmd.Use = "patch ID" + cmd.Short = `Update user details.` + cmd.Long = `Update user details. Partially updates a user resource by applying the supplied operations on - specific user attributes.`, + specific user attributes.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -290,23 +376,6 @@ var patchCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Users drop-down." - names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have unique id for a user in the databricks workspace") - } patchReq.Id = args[0] err = w.Users.Patch(ctx, patchReq) @@ -314,44 +383,73 @@ var patchCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range patchOverrides { + fn(cmd, &patchReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newPatch()) + }) } // start update command -var updateReq iam.User -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *iam.User, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq iam.User + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) - updateCmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) - updateCmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) + cmd.Flags().BoolVar(&updateReq.Active, "active", updateReq.Active, `If this user is active.`) + cmd.Flags().StringVar(&updateReq.DisplayName, "display-name", updateReq.DisplayName, `String that represents a concatenation of given and family names.`) // TODO: array: emails // TODO: array: entitlements - updateCmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) + cmd.Flags().StringVar(&updateReq.ExternalId, "external-id", updateReq.ExternalId, ``) // TODO: array: groups - updateCmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) + cmd.Flags().StringVar(&updateReq.Id, "id", updateReq.Id, `Databricks user ID.`) // TODO: complex arg: name // TODO: array: roles - updateCmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) + cmd.Flags().StringVar(&updateReq.UserName, "user-name", updateReq.UserName, `Email address of the Databricks user.`) -} - -var updateCmd = &cobra.Command{ - Use: "update ID", - Short: `Replace a user.`, - Long: `Replace a user. + cmd.Use = "update ID" + cmd.Short = `Replace a user.` + cmd.Long = `Replace a user. 
- Replaces a user's information with the data supplied in request.`, + Replaces a user's information with the data supplied in request.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -361,23 +459,6 @@ var updateCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Users drop-down." - names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Databricks user ID") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have databricks user id") - } updateReq.Id = args[0] } @@ -386,10 +467,24 @@ var updateCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Users diff --git a/cmd/workspace/volumes/volumes.go b/cmd/workspace/volumes/volumes.go index e020700aa..72c1ff7c4 100755 --- a/cmd/workspace/volumes/volumes.go +++ b/cmd/workspace/volumes/volumes.go @@ -12,10 +12,15 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "volumes", - Short: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.`, - Long: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "volumes", + Short: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files.`, + Long: `Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF files, organizing data sets during the data exploration stages in data science, working with @@ -23,32 +28,47 @@ var Cmd = &cobra.Command{ storing library and config files of arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or transforming and querying non-tabular data files in ETL.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, - // This service is being previewed; hide from help output. - Hidden: true, + // This service is being previewed; hide from help output. 
+ Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq catalog.CreateVolumeRequestContent -var createJson flags.JsonFlag -func init() { - Cmd.AddCommand(createCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *catalog.CreateVolumeRequestContent, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateVolumeRequestContent + var createJson flags.JsonFlag + // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the volume.`) - createCmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud.`) + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the volume.`) + cmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud.`) -} - -var createCmd = &cobra.Command{ - Use: "create CATALOG_NAME NAME SCHEMA_NAME VOLUME_TYPE", - Short: `Create a Volume.`, - Long: `Create a Volume. + cmd.Use = "create CATALOG_NAME NAME SCHEMA_NAME VOLUME_TYPE" + cmd.Short = `Create a Volume.` + cmd.Long = `Create a Volume. Creates a new volume. @@ -67,18 +87,20 @@ var createCmd = &cobra.Command{ must have **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes existing in the specified storage location. 
- The specified storage location is not under the location of other - tables, nor volumes, or catalogs or schemas.`, + tables, nor volumes, or catalogs or schemas.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(4) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -102,55 +124,64 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq catalog.DeleteVolumeRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteVolumeRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteVolumeRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete FULL_NAME_ARG", - Short: `Delete a Volume.`, - Long: `Delete a Volume. 
+ cmd.Use = "delete FULL_NAME_ARG" + cmd.Short = `Delete a Volume.` + cmd.Long = `Delete a Volume. Deletes a volume from the specified parent catalog and schema. The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege - on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.`, + on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." - names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") - } deleteReq.FullNameArg = args[0] err = w.Volumes.Delete(ctx, deleteReq) @@ -158,25 +189,45 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. 
- ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start list command -var listReq catalog.ListVolumesRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListVolumesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListVolumesRequest + // TODO: short flags -} - -var listCmd = &cobra.Command{ - Use: "list CATALOG_NAME SCHEMA_NAME", - Short: `List Volumes.`, - Long: `List Volumes. + cmd.Use = "list CATALOG_NAME SCHEMA_NAME" + cmd.Short = `List Volumes.` + cmd.Long = `List Volumes. Gets an array of all volumes for the current metastore under the parent catalog and schema. @@ -188,15 +239,17 @@ var listCmd = &cobra.Command{ also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
- There is no guarantee of a specific ordering of the elements in the array.`, + There is no guarantee of a specific ordering of the elements in the array.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(2) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -208,56 +261,65 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start read command -var readReq catalog.ReadVolumeRequest -func init() { - Cmd.AddCommand(readCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var readOverrides []func( + *cobra.Command, + *catalog.ReadVolumeRequest, +) + +func newRead() *cobra.Command { + cmd := &cobra.Command{} + + var readReq catalog.ReadVolumeRequest + // TODO: short flags -} - -var readCmd = &cobra.Command{ - Use: "read FULL_NAME_ARG", - Short: `Get a Volume.`, - Long: `Get a Volume. + cmd.Use = "read FULL_NAME_ARG" + cmd.Short = `Get a Volume.` + cmd.Long = `Get a Volume. Gets a volume from the metastore for a specific catalog and schema. 
The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and - the **USE_SCHEMA** privilege on the parent schema.`, + the **USE_SCHEMA** privilege on the parent schema.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." - names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") - } readReq.FullNameArg = args[0] response, err := w.Volumes.Read(ctx, readReq) @@ -265,29 +327,49 @@ var readCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range readOverrides { + fn(cmd, &readReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRead()) + }) } // start update command -var updateReq catalog.UpdateVolumeRequestContent -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateVolumeRequestContent, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateVolumeRequestContent + // TODO: short flags - updateCmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the volume.`) - updateCmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the volume.`) - updateCmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the volume.`) + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the volume.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the volume.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the volume.`) -} - -var updateCmd = &cobra.Command{ - Use: "update FULL_NAME_ARG", - Short: `Update a Volume.`, - Long: `Update a Volume. + cmd.Use = "update FULL_NAME_ARG" + cmd.Short = `Update a Volume.` + cmd.Long = `Update a Volume. Updates the specified volume under the specified parent catalog and schema. @@ -296,31 +378,20 @@ var updateCmd = &cobra.Command{ on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. 
Currently only the name, the owner or the comment of the volume could be - updated.`, + updated.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." - names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") - } updateReq.FullNameArg = args[0] response, err := w.Volumes.Update(ctx, updateReq) @@ -328,10 +399,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service Volumes diff --git a/cmd/workspace/warehouses/overrides.go b/cmd/workspace/warehouses/overrides.go index 82319d6fc..0714937c2 100644 --- a/cmd/workspace/warehouses/overrides.go +++ b/cmd/workspace/warehouses/overrides.go @@ -1,10 +1,18 @@ package warehouses -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *sql.ListWarehousesRequest) { listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Name"}} {{header "Size"}} {{header "State"}} {{range .}}{{.Id|green}} {{.Name|cyan}} {{.ClusterSize|cyan}} {{if eq .State "RUNNING"}}{{"RUNNING"|green}}{{else if eq .State "STOPPED"}}{{"STOPPED"|red}}{{else}}{{blue "%s" .State}}{{end}} {{end}}`) } + +func init() { + listOverrides = append(listOverrides, listOverride) +} diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index a29c4031e..1d7dde033 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -13,65 +13,86 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "warehouses", - Short: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.`, - Long: `A SQL warehouse is a compute resource that lets you run SQL commands on data +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "warehouses", + Short: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL.`, + Long: `A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.`, - Annotations: map[string]string{ - "package": "sql", - }, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start create command -var createReq sql.CreateWarehouseRequest -var createJson flags.JsonFlag -var createSkipWait bool -var createTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sql.CreateWarehouseRequest, +) -func init() { - Cmd.AddCommand(createCmd) +func newCreate() *cobra.Command { + cmd := &cobra.Command{} - createCmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) - createCmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var createReq sql.CreateWarehouseRequest + var createJson flags.JsonFlag + + var createSkipWait bool + var createTimeout time.Duration + + cmd.Flags().BoolVar(&createSkipWait, "no-wait", createSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&createTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - createCmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - createCmd.Flags().IntVar(&createReq.AutoStopMins, "auto-stop-mins", createReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) + cmd.Flags().IntVar(&createReq.AutoStopMins, "auto-stop-mins", createReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) // TODO: complex arg: channel - createCmd.Flags().StringVar(&createReq.ClusterSize, "cluster-size", createReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) - createCmd.Flags().StringVar(&createReq.CreatorName, "creator-name", createReq.CreatorName, `warehouse creator name.`) - createCmd.Flags().BoolVar(&createReq.EnablePhoton, "enable-photon", createReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) - createCmd.Flags().BoolVar(&createReq.EnableServerlessCompute, 
"enable-serverless-compute", createReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) - createCmd.Flags().StringVar(&createReq.InstanceProfileArn, "instance-profile-arn", createReq.InstanceProfileArn, `Deprecated.`) - createCmd.Flags().IntVar(&createReq.MaxNumClusters, "max-num-clusters", createReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) - createCmd.Flags().IntVar(&createReq.MinNumClusters, "min-num-clusters", createReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) - createCmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Logical name for the cluster.`) - createCmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) + cmd.Flags().StringVar(&createReq.ClusterSize, "cluster-size", createReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) + cmd.Flags().StringVar(&createReq.CreatorName, "creator-name", createReq.CreatorName, `warehouse creator name.`) + cmd.Flags().BoolVar(&createReq.EnablePhoton, "enable-photon", createReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) + cmd.Flags().BoolVar(&createReq.EnableServerlessCompute, "enable-serverless-compute", createReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) + cmd.Flags().StringVar(&createReq.InstanceProfileArn, "instance-profile-arn", createReq.InstanceProfileArn, `Deprecated.`) + cmd.Flags().IntVar(&createReq.MaxNumClusters, "max-num-clusters", createReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) + cmd.Flags().IntVar(&createReq.MinNumClusters, "min-num-clusters", createReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) + 
cmd.Flags().StringVar(&createReq.Name, "name", createReq.Name, `Logical name for the cluster.`) + cmd.Flags().Var(&createReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) // TODO: complex arg: tags - createCmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) + cmd.Flags().Var(&createReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) -} - -var createCmd = &cobra.Command{ - Use: "create", - Short: `Create a warehouse.`, - Long: `Create a warehouse. + cmd.Use = "create" + cmd.Short = `Create a warehouse.` + cmd.Long = `Create a warehouse. - Creates a new SQL warehouse.`, + Creates a new SQL warehouse.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -107,51 +128,60 @@ var createCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) } // start delete command -var deleteReq sql.DeleteWarehouseRequest -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *sql.DeleteWarehouseRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteWarehouseRequest + // TODO: short flags -} - -var deleteCmd = &cobra.Command{ - Use: "delete ID", - Short: `Delete a warehouse.`, - Long: `Delete a warehouse. + cmd.Use = "delete ID" + cmd.Short = `Delete a warehouse.` + cmd.Long = `Delete a warehouse. - Deletes a SQL warehouse.`, + Deletes a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." - names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have required") - } deleteReq.Id = args[0] err = w.Warehouses.Delete(ctx, deleteReq) @@ -159,53 +189,78 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start edit command -var editReq sql.EditWarehouseRequest -var editJson flags.JsonFlag -var editSkipWait bool -var editTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var editOverrides []func( + *cobra.Command, + *sql.EditWarehouseRequest, +) -func init() { - Cmd.AddCommand(editCmd) +func newEdit() *cobra.Command { + cmd := &cobra.Command{} - editCmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) - editCmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var editReq sql.EditWarehouseRequest + var editJson flags.JsonFlag + + var editSkipWait bool + var editTimeout time.Duration + + cmd.Flags().BoolVar(&editSkipWait, "no-wait", editSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&editTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags - editCmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&editJson, "json", `either inline JSON string or @path/to/file.json with request body`) - editCmd.Flags().IntVar(&editReq.AutoStopMins, "auto-stop-mins", editReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) + cmd.Flags().IntVar(&editReq.AutoStopMins, "auto-stop-mins", editReq.AutoStopMins, `The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it is automatically stopped.`) // TODO: complex arg: channel - editCmd.Flags().StringVar(&editReq.ClusterSize, "cluster-size", editReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) - editCmd.Flags().StringVar(&editReq.CreatorName, "creator-name", editReq.CreatorName, `warehouse creator name.`) - editCmd.Flags().BoolVar(&editReq.EnablePhoton, "enable-photon", editReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) - editCmd.Flags().BoolVar(&editReq.EnableServerlessCompute, "enable-serverless-compute", editReq.EnableServerlessCompute, `Configures 
whether the warehouse should use serverless compute.`) - editCmd.Flags().StringVar(&editReq.InstanceProfileArn, "instance-profile-arn", editReq.InstanceProfileArn, `Deprecated.`) - editCmd.Flags().IntVar(&editReq.MaxNumClusters, "max-num-clusters", editReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) - editCmd.Flags().IntVar(&editReq.MinNumClusters, "min-num-clusters", editReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) - editCmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Logical name for the cluster.`) - editCmd.Flags().Var(&editReq.SpotInstancePolicy, "spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) + cmd.Flags().StringVar(&editReq.ClusterSize, "cluster-size", editReq.ClusterSize, `Size of the clusters allocated for this warehouse.`) + cmd.Flags().StringVar(&editReq.CreatorName, "creator-name", editReq.CreatorName, `warehouse creator name.`) + cmd.Flags().BoolVar(&editReq.EnablePhoton, "enable-photon", editReq.EnablePhoton, `Configures whether the warehouse should use Photon optimized clusters.`) + cmd.Flags().BoolVar(&editReq.EnableServerlessCompute, "enable-serverless-compute", editReq.EnableServerlessCompute, `Configures whether the warehouse should use serverless compute.`) + cmd.Flags().StringVar(&editReq.InstanceProfileArn, "instance-profile-arn", editReq.InstanceProfileArn, `Deprecated.`) + cmd.Flags().IntVar(&editReq.MaxNumClusters, "max-num-clusters", editReq.MaxNumClusters, `Maximum number of clusters that the autoscaler will create to handle concurrent queries.`) + cmd.Flags().IntVar(&editReq.MinNumClusters, "min-num-clusters", editReq.MinNumClusters, `Minimum number of available clusters that will be maintained for this SQL warehouse.`) + cmd.Flags().StringVar(&editReq.Name, "name", editReq.Name, `Logical name for the cluster.`) + cmd.Flags().Var(&editReq.SpotInstancePolicy, 
"spot-instance-policy", `Configurations whether the warehouse should use spot instances.`) // TODO: complex arg: tags - editCmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) + cmd.Flags().Var(&editReq.WarehouseType, "warehouse-type", `Warehouse type: PRO or CLASSIC.`) -} - -var editCmd = &cobra.Command{ - Use: "edit ID", - Short: `Update a warehouse.`, - Long: `Update a warehouse. + cmd.Use = "edit ID" + cmd.Short = `Update a warehouse.` + cmd.Long = `Update a warehouse. - Updates the configuration for a SQL warehouse.`, + Updates the configuration for a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -215,23 +270,6 @@ var editCmd = &cobra.Command{ return err } } - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." - names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have required") - } editReq.Id = args[0] wait, err := w.Warehouses.Edit(ctx, editReq) @@ -258,57 +296,65 @@ var editCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range editOverrides { + fn(cmd, &editReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newEdit()) + }) } // start get command -var getReq sql.GetWarehouseRequest -var getSkipWait bool -var getTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *sql.GetWarehouseRequest, +) -func init() { - Cmd.AddCommand(getCmd) +func newGet() *cobra.Command { + cmd := &cobra.Command{} - getCmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) - getCmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var getReq sql.GetWarehouseRequest + + var getSkipWait bool + var getTimeout time.Duration + + cmd.Flags().BoolVar(&getSkipWait, "no-wait", getSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&getTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get ID", - Short: `Get warehouse info.`, - Long: `Get warehouse info. + cmd.Use = "get ID" + cmd.Short = `Get warehouse info.` + cmd.Long = `Get warehouse info. 
- Gets the information for a single SQL warehouse.`, + Gets the information for a single SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." - names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have required") - } getReq.Id = args[0] response, err := w.Warehouses.Get(ctx, getReq) @@ -316,30 +362,48 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start get-workspace-warehouse-config command -func init() { - Cmd.AddCommand(getWorkspaceWarehouseConfigCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getWorkspaceWarehouseConfigOverrides []func( + *cobra.Command, +) -} +func newGetWorkspaceWarehouseConfig() *cobra.Command { + cmd := &cobra.Command{} -var getWorkspaceWarehouseConfigCmd = &cobra.Command{ - Use: "get-workspace-warehouse-config", - Short: `Get the workspace configuration.`, - Long: `Get the workspace configuration. + cmd.Use = "get-workspace-warehouse-config" + cmd.Short = `Get the workspace configuration.` + cmd.Long = `Get the workspace configuration. Gets the workspace level configuration that is shared by all SQL warehouses in - a workspace.`, + a workspace.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) response, err := w.Warehouses.GetWorkspaceWarehouseConfig(ctx) @@ -347,42 +411,64 @@ var getWorkspaceWarehouseConfigCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getWorkspaceWarehouseConfigOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetWorkspaceWarehouseConfig()) + }) } // start list command -var listReq sql.ListWarehousesRequest -var listJson flags.JsonFlag -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *sql.ListWarehousesRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq sql.ListWarehousesRequest + var listJson flags.JsonFlag + // TODO: short flags - listCmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) - listCmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Service Principal which will be used to fetch the list of warehouses.`) + cmd.Flags().IntVar(&listReq.RunAsUserId, "run-as-user-id", listReq.RunAsUserId, `Service Principal which will be used to fetch the list of warehouses.`) -} - -var listCmd = &cobra.Command{ - Use: "list", - Short: `List warehouses.`, - Long: `List warehouses. + cmd.Use = "list" + cmd.Short = `List warehouses.` + cmd.Long = `List warehouses. - Lists all SQL warehouses that a user has manager permissions on.`, + Lists all SQL warehouses that a user has manager permissions on.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -399,51 +485,73 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. 
// Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start set-workspace-warehouse-config command -var setWorkspaceWarehouseConfigReq sql.SetWorkspaceWarehouseConfigRequest -var setWorkspaceWarehouseConfigJson flags.JsonFlag -func init() { - Cmd.AddCommand(setWorkspaceWarehouseConfigCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setWorkspaceWarehouseConfigOverrides []func( + *cobra.Command, + *sql.SetWorkspaceWarehouseConfigRequest, +) + +func newSetWorkspaceWarehouseConfig() *cobra.Command { + cmd := &cobra.Command{} + + var setWorkspaceWarehouseConfigReq sql.SetWorkspaceWarehouseConfigRequest + var setWorkspaceWarehouseConfigJson flags.JsonFlag + // TODO: short flags - setWorkspaceWarehouseConfigCmd.Flags().Var(&setWorkspaceWarehouseConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setWorkspaceWarehouseConfigJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: complex arg: channel // TODO: complex arg: config_param // TODO: array: data_access_config // TODO: array: enabled_warehouse_types // TODO: complex arg: global_param - setWorkspaceWarehouseConfigCmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.GoogleServiceAccount, "google-service-account", setWorkspaceWarehouseConfigReq.GoogleServiceAccount, `GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage.`) - setWorkspaceWarehouseConfigCmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.InstanceProfileArn, 
"instance-profile-arn", setWorkspaceWarehouseConfigReq.InstanceProfileArn, `AWS Only: Instance profile used to pass IAM role to the cluster.`) - setWorkspaceWarehouseConfigCmd.Flags().Var(&setWorkspaceWarehouseConfigReq.SecurityPolicy, "security-policy", `Security policy for warehouses.`) + cmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.GoogleServiceAccount, "google-service-account", setWorkspaceWarehouseConfigReq.GoogleServiceAccount, `GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage.`) + cmd.Flags().StringVar(&setWorkspaceWarehouseConfigReq.InstanceProfileArn, "instance-profile-arn", setWorkspaceWarehouseConfigReq.InstanceProfileArn, `AWS Only: Instance profile used to pass IAM role to the cluster.`) + cmd.Flags().Var(&setWorkspaceWarehouseConfigReq.SecurityPolicy, "security-policy", `Security policy for warehouses.`) // TODO: complex arg: sql_configuration_parameters -} - -var setWorkspaceWarehouseConfigCmd = &cobra.Command{ - Use: "set-workspace-warehouse-config", - Short: `Set the workspace configuration.`, - Long: `Set the workspace configuration. + cmd.Use = "set-workspace-warehouse-config" + cmd.Short = `Set the workspace configuration.` + cmd.Long = `Set the workspace configuration. 
Sets the workspace level configuration that is shared by all SQL warehouses in - a workspace.`, + a workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -460,57 +568,65 @@ var setWorkspaceWarehouseConfigCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setWorkspaceWarehouseConfigOverrides { + fn(cmd, &setWorkspaceWarehouseConfigReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetWorkspaceWarehouseConfig()) + }) } // start start command -var startReq sql.StartRequest -var startSkipWait bool -var startTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var startOverrides []func( + *cobra.Command, + *sql.StartRequest, +) -func init() { - Cmd.AddCommand(startCmd) +func newStart() *cobra.Command { + cmd := &cobra.Command{} - startCmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) - startCmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) + var startReq sql.StartRequest + + var startSkipWait bool + var startTimeout time.Duration + + cmd.Flags().BoolVar(&startSkipWait, "no-wait", startSkipWait, `do not wait to reach RUNNING state`) + cmd.Flags().DurationVar(&startTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags -} - -var startCmd = &cobra.Command{ - Use: "start ID", - Short: `Start a warehouse.`, - Long: `Start a warehouse. + cmd.Use = "start ID" + cmd.Short = `Start a warehouse.` + cmd.Long = `Start a warehouse. - Starts a SQL warehouse.`, + Starts a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." - names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have required") - } startReq.Id = args[0] wait, err := w.Warehouses.Start(ctx, startReq) @@ -537,57 +653,65 @@ var startCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range startOverrides { + fn(cmd, &startReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStart()) + }) } // start stop command -var stopReq sql.StopRequest -var stopSkipWait bool -var stopTimeout time.Duration +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var stopOverrides []func( + *cobra.Command, + *sql.StopRequest, +) -func init() { - Cmd.AddCommand(stopCmd) +func newStop() *cobra.Command { + cmd := &cobra.Command{} - stopCmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach STOPPED state`) - stopCmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach STOPPED state`) + var stopReq sql.StopRequest + + var stopSkipWait bool + var stopTimeout time.Duration + + cmd.Flags().BoolVar(&stopSkipWait, "no-wait", stopSkipWait, `do not wait to reach STOPPED state`) + cmd.Flags().DurationVar(&stopTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach STOPPED state`) // TODO: short flags -} - -var stopCmd = &cobra.Command{ - Use: "stop ID", - Short: `Stop a warehouse.`, - Long: `Stop a warehouse. 
+ cmd.Use = "stop ID" + cmd.Short = `Stop a warehouse.` + cmd.Long = `Stop a warehouse. - Stops a SQL warehouse.`, + Stops a SQL warehouse.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." - names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "Required") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have required") - } stopReq.Id = args[0] wait, err := w.Warehouses.Stop(ctx, stopReq) @@ -614,10 +738,24 @@ var stopCmd = &cobra.Command{ return err } return cmdio.Render(ctx, info) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range stopOverrides { + fn(cmd, &stopReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newStop()) + }) } // end service Warehouses diff --git a/cmd/workspace/workspace-bindings/workspace-bindings.go b/cmd/workspace/workspace-bindings/workspace-bindings.go index 8780106b1..3d7fa677c 100755 --- a/cmd/workspace/workspace-bindings/workspace-bindings.go +++ b/cmd/workspace/workspace-bindings/workspace-bindings.go @@ -10,44 +10,66 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-bindings", - Short: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__.`, - Long: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-bindings", + Short: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__.`, + Long: `A catalog in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ catalog can be accessed from any workspace, while an __ISOLATED__ catalog can only be access from a configured list of workspaces. A catalog's workspace bindings can be configured by a metastore admin or the owner of the catalog.`, - Annotations: map[string]string{ - "package": "catalog", - }, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get command -var getReq catalog.GetWorkspaceBindingRequest -func init() { - Cmd.AddCommand(getCmd) +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetWorkspaceBindingRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetWorkspaceBindingRequest + // TODO: short flags -} - -var getCmd = &cobra.Command{ - Use: "get NAME", - Short: `Get catalog workspace bindings.`, - Long: `Get catalog workspace bindings. + cmd.Use = "get NAME" + cmd.Short = `Get catalog workspace bindings.` + cmd.Long = `Get catalog workspace bindings. Gets workspace bindings of the catalog. The caller must be a metastore admin - or an owner of the catalog.`, + or an owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -58,41 +80,63 @@ var getCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) } // start update command -var updateReq catalog.UpdateWorkspaceBindings -var updateJson flags.JsonFlag -func init() { - Cmd.AddCommand(updateCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateWorkspaceBindings, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateWorkspaceBindings + var updateJson flags.JsonFlag + // TODO: short flags - updateCmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: assign_workspaces // TODO: array: unassign_workspaces -} - -var updateCmd = &cobra.Command{ - Use: "update NAME", - Short: `Update catalog workspace bindings.`, - Long: `Update catalog workspace bindings. + cmd.Use = "update NAME" + cmd.Short = `Update catalog workspace bindings.` + cmd.Long = `Update catalog workspace bindings. Updates workspace bindings of the catalog. 
The caller must be a metastore - admin or an owner of the catalog.`, + admin or an owner of the catalog.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -109,10 +153,24 @@ var updateCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) } // end service WorkspaceBindings diff --git a/cmd/workspace/workspace-conf/workspace-conf.go b/cmd/workspace/workspace-conf/workspace-conf.go index f2f0bb759..d828f66ea 100755 --- a/cmd/workspace/workspace-conf/workspace-conf.go +++ b/cmd/workspace/workspace-conf/workspace-conf.go @@ -10,38 +10,60 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace-conf", - Short: `This API allows updating known workspace settings for advanced users.`, - Long: `This API allows updating known workspace settings for advanced users.`, - Annotations: map[string]string{ - "package": "settings", - }, +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace-conf", + Short: `This API allows updating known workspace settings for advanced users.`, + Long: `This API allows updating known workspace settings for advanced users.`, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start get-status command -var getStatusReq settings.GetStatusRequest -func init() { - Cmd.AddCommand(getStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getStatusOverrides []func( + *cobra.Command, + *settings.GetStatusRequest, +) + +func newGetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var getStatusReq settings.GetStatusRequest + // TODO: short flags -} - -var getStatusCmd = &cobra.Command{ - Use: "get-status KEYS", - Short: `Check configuration status.`, - Long: `Check configuration status. + cmd.Use = "get-status KEYS" + cmd.Short = `Check configuration status.` + cmd.Long = `Check configuration status. 
- Gets the configuration status for a workspace.`, + Gets the configuration status for a workspace.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -52,41 +74,63 @@ var getStatusCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getStatusOverrides { + fn(cmd, &getStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetStatus()) + }) } // start set-status command -var setStatusReq settings.WorkspaceConf -var setStatusJson flags.JsonFlag -func init() { - Cmd.AddCommand(setStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setStatusOverrides []func( + *cobra.Command, + *settings.WorkspaceConf, +) + +func newSetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var setStatusReq settings.WorkspaceConf + var setStatusJson flags.JsonFlag + // TODO: short flags - setStatusCmd.Flags().Var(&setStatusJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&setStatusJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var setStatusCmd = &cobra.Command{ - Use: "set-status", - Short: `Enable/disable features.`, - Long: `Enable/disable features. + cmd.Use = "set-status" + cmd.Short = `Enable/disable features.` + cmd.Long = `Enable/disable features. Sets the configuration status for a workspace, including enabling or disabling - it.`, + it.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(0) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -103,10 +147,24 @@ var setStatusCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setStatusOverrides { + fn(cmd, &setStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetStatus()) + }) } // end service WorkspaceConf diff --git a/cmd/workspace/workspace/export_dir.go b/cmd/workspace/workspace/export_dir.go index 1c3fe968f..4f50a96e4 100644 --- a/cmd/workspace/workspace/export_dir.go +++ b/cmd/workspace/workspace/export_dir.go @@ -15,9 +15,19 @@ import ( "github.com/spf13/cobra" ) +type exportDirOptions struct { + sourceDir string + targetDir string + overwrite bool +} + // The callback function exports the file specified at relPath. This function is // meant to be used in conjunction with fs.WalkDir -func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceDir, targetDir string) func(string, fs.DirEntry, error) error { +func (opts exportDirOptions) callback(ctx context.Context, workspaceFiler filer.Filer) func(string, fs.DirEntry, error) error { + sourceDir := opts.sourceDir + targetDir := opts.targetDir + overwrite := opts.overwrite + return func(relPath string, d fs.DirEntry, err error) error { if err != nil { return err @@ -55,7 +65,7 @@ func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD // Skip file if a file already exists in path. // os.Stat returns a fs.ErrNotExist if a file does not exist at path. 
// If a file exists, and overwrite is not set, we skip exporting the file - if _, err := os.Stat(targetPath); err == nil && !exportOverwrite { + if _, err := os.Stat(targetPath); err == nil && !overwrite { // Log event that this file/directory has been skipped return cmdio.RenderWithTemplate(ctx, newFileSkippedEvent(relPath, targetPath), "{{.SourcePath}} -> {{.TargetPath}} (skipped; already exists)\n") } @@ -80,46 +90,56 @@ func exportFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD } } -var exportDirCommand = &cobra.Command{ - Use: "export-dir SOURCE_PATH TARGET_PATH", - Short: `Export a directory from a Databricks workspace to the local file system.`, - Long: ` -Export a directory recursively from a Databricks workspace to the local file system. -Notebooks will have one of the following extensions added .scala, .py, .sql, or .r -based on the language type. -`, - PreRunE: root.MustWorkspaceClient, - Args: cobra.ExactArgs(2), - RunE: func(cmd *cobra.Command, args []string) (err error) { +func newExportDir() *cobra.Command { + cmd := &cobra.Command{} + + var opts exportDirOptions + + cmd.Flags().BoolVar(&opts.overwrite, "overwrite", false, "overwrite existing local files") + + cmd.Use = "export-dir SOURCE_PATH TARGET_PATH" + cmd.Short = `Export a directory from a Databricks workspace to the local file system.` + cmd.Long = ` + Export a directory recursively from a Databricks workspace to the local file system. + Notebooks will have one of the following extensions added .scala, .py, .sql, or .r + based on the language type. 
+ ` + + cmd.Annotations = make(map[string]string) + cmd.Args = cobra.ExactArgs(2) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - sourceDir := args[0] - targetDir := args[1] + opts.sourceDir = args[0] + opts.targetDir = args[1] // Initialize a filer and a file system on the source directory - workspaceFiler, err := filer.NewWorkspaceFilesClient(w, sourceDir) + workspaceFiler, err := filer.NewWorkspaceFilesClient(w, opts.sourceDir) if err != nil { return err } workspaceFS := filer.NewFS(ctx, workspaceFiler) // TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448 - err = cmdio.RenderJson(ctx, newExportStartedEvent(sourceDir)) + err = cmdio.RenderJson(ctx, newExportStartedEvent(opts.sourceDir)) if err != nil { return err } - err = fs.WalkDir(workspaceFS, ".", exportFileCallback(ctx, workspaceFiler, sourceDir, targetDir)) + err = fs.WalkDir(workspaceFS, ".", opts.callback(ctx, workspaceFiler)) if err != nil { return err } - return cmdio.RenderJson(ctx, newExportCompletedEvent(targetDir)) - }, + return cmdio.RenderJson(ctx, newExportCompletedEvent(opts.targetDir)) + } + + return cmd } -var exportOverwrite bool - func init() { - exportDirCommand.Flags().BoolVar(&exportOverwrite, "overwrite", false, "overwrite existing local files") - Cmd.AddCommand(exportDirCommand) + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExportDir()) + }) } diff --git a/cmd/workspace/workspace/import_dir.go b/cmd/workspace/workspace/import_dir.go index af9c38ca3..bc0b80667 100644 --- a/cmd/workspace/workspace/import_dir.go +++ b/cmd/workspace/workspace/import_dir.go @@ -16,6 +16,12 @@ import ( "github.com/spf13/cobra" ) +type importDirOptions struct { + sourceDir string + targetDir string + overwrite bool +} + // The callback function imports the file specified at sourcePath. 
This function is // meant to be used in conjunction with fs.WalkDir // @@ -31,7 +37,11 @@ import ( // 1. Read the notebook, referring to it using it's local name "foo\\myNotebook.py" // 2. API call to import the notebook to the workspace, using it API payload name "foo/myNotebook.py" // 3. The notebook is materialized in the workspace using it's remote name "foo/myNotebook" -func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceDir, targetDir string) func(string, fs.DirEntry, error) error { +func (opts importDirOptions) callback(ctx context.Context, workspaceFiler filer.Filer) func(string, fs.DirEntry, error) error { + sourceDir := opts.sourceDir + targetDir := opts.targetDir + overwrite := opts.overwrite + return func(sourcePath string, d fs.DirEntry, err error) error { if err != nil { return err @@ -72,7 +82,7 @@ func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD defer f.Close() // Create file in WSFS - if importOverwrite { + if overwrite { err = workspaceFiler.Write(ctx, nameForApiCall, f, filer.OverwriteIfExists) if err != nil { return err @@ -94,45 +104,55 @@ func importFileCallback(ctx context.Context, workspaceFiler filer.Filer, sourceD } } -var importDirCommand = &cobra.Command{ - Use: "import-dir SOURCE_PATH TARGET_PATH", - Short: `Import a directory from the local filesystem to a Databricks workspace.`, - Long: ` +func newImportDir() *cobra.Command { + cmd := &cobra.Command{} + + var opts importDirOptions + + cmd.Flags().BoolVar(&opts.overwrite, "overwrite", false, "overwrite existing workspace files") + + cmd.Use = "import-dir SOURCE_PATH TARGET_PATH" + cmd.Short = `Import a directory from the local filesystem to a Databricks workspace.` + cmd.Long = ` Import a directory recursively from the local file system to a Databricks workspace. 
Notebooks will have their extensions (one of .scala, .py, .sql, .ipynb, .r) stripped -`, - PreRunE: root.MustWorkspaceClient, - Args: cobra.ExactArgs(2), - RunE: func(cmd *cobra.Command, args []string) (err error) { +` + + cmd.Annotations = make(map[string]string) + cmd.Args = cobra.ExactArgs(2) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - sourceDir := args[0] - targetDir := args[1] + opts.sourceDir = args[0] + opts.targetDir = args[1] // Initialize a filer rooted at targetDir - workspaceFiler, err := filer.NewWorkspaceFilesClient(w, targetDir) + workspaceFiler, err := filer.NewWorkspaceFilesClient(w, opts.targetDir) if err != nil { return err } // TODO: print progress events on stderr instead: https://github.com/databricks/cli/issues/448 - err = cmdio.RenderJson(ctx, newImportStartedEvent(sourceDir)) + err = cmdio.RenderJson(ctx, newImportStartedEvent(opts.sourceDir)) if err != nil { return err } // Walk local directory tree and import files to the workspace - err = filepath.WalkDir(sourceDir, importFileCallback(ctx, workspaceFiler, sourceDir, targetDir)) + err = filepath.WalkDir(opts.sourceDir, opts.callback(ctx, workspaceFiler)) if err != nil { return err } - return cmdio.RenderJson(ctx, newImportCompletedEvent(targetDir)) - }, -} + return cmdio.RenderJson(ctx, newImportCompletedEvent(opts.targetDir)) + } -var importOverwrite bool + return cmd +} func init() { - importDirCommand.Flags().BoolVar(&importOverwrite, "overwrite", false, "overwrite existing workspace files") - Cmd.AddCommand(importDirCommand) + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newImportDir()) + }) } diff --git a/cmd/workspace/workspace/overrides.go b/cmd/workspace/workspace/overrides.go index e1b97c598..9cae5bef5 100644 --- a/cmd/workspace/workspace/overrides.go +++ b/cmd/workspace/workspace/overrides.go @@ -1,14 +1,25 @@ package 
workspace -import "github.com/databricks/cli/libs/cmdio" +import ( + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/spf13/cobra" +) -func init() { +func listOverride(listCmd *cobra.Command, listReq *workspace.ListWorkspaceRequest) { listReq.Path = "/" listCmd.Annotations["template"] = cmdio.Heredoc(` {{header "ID"}} {{header "Type"}} {{header "Language"}} {{header "Path"}} {{range .}}{{green "%d" .ObjectId}} {{blue "%s" .ObjectType}} {{cyan "%s" .Language}} {{.Path|cyan}} {{end}}`) +} +func exportOverride(exportCmd *cobra.Command, exportReq *workspace.ExportRequest) { // The export command prints the contents of the file to stdout by default. exportCmd.Annotations["template"] = `{{.Content | b64_decode}}` } + +func init() { + listOverrides = append(listOverrides, listOverride) + exportOverrides = append(exportOverrides, exportOverride) +} diff --git a/cmd/workspace/workspace/workspace.go b/cmd/workspace/workspace/workspace.go index ab9c6aec0..153fffe43 100755 --- a/cmd/workspace/workspace/workspace.go +++ b/cmd/workspace/workspace/workspace.go @@ -3,8 +3,6 @@ package workspace import ( - "fmt" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -12,36 +10,56 @@ import ( "github.com/spf13/cobra" ) -var Cmd = &cobra.Command{ - Use: "workspace", - Short: `The Workspace API allows you to list, import, export, and delete notebooks and folders.`, - Long: `The Workspace API allows you to list, import, export, and delete notebooks and +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "workspace", + Short: `The Workspace API allows you to list, import, export, and delete notebooks and folders.`, + Long: `The Workspace API allows you to list, import, export, and delete notebooks and folders. A notebook is a web-based interface to a document that contains runnable code, visualizations, and explanatory text.`, - Annotations: map[string]string{ - "package": "workspace", - }, + GroupID: "workspace", + Annotations: map[string]string{ + "package": "workspace", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd } // start delete command -var deleteReq workspace.Delete -var deleteJson flags.JsonFlag -func init() { - Cmd.AddCommand(deleteCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *workspace.Delete, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq workspace.Delete + var deleteJson flags.JsonFlag + // TODO: short flags - deleteCmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&deleteJson, "json", `either inline JSON string or @path/to/file.json with request body`) - deleteCmd.Flags().BoolVar(&deleteReq.Recursive, "recursive", deleteReq.Recursive, `The flag that specifies whether to delete the object recursively.`) + cmd.Flags().BoolVar(&deleteReq.Recursive, "recursive", deleteReq.Recursive, `The flag that specifies whether to delete the object recursively.`) -} - -var deleteCmd = &cobra.Command{ - Use: "delete PATH", - Short: `Delete a workspace object.`, - Long: `Delete a workspace object. + cmd.Use = "delete PATH" + cmd.Short = `Delete a workspace object.` + cmd.Long = `Delete a workspace object. 
Deletes an object or a directory (and optionally recursively deletes all objects in the directory). * If path does not exist, this call returns an @@ -50,11 +68,20 @@ var deleteCmd = &cobra.Command{ DIRECTORY_NOT_EMPTY. Object deletion cannot be undone and deleting a directory recursively is not - atomic.`, + atomic.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -64,23 +91,6 @@ var deleteCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PATH argument specified. Loading names for Workspace drop-down." - names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The absolute path of the notebook or directory") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the absolute path of the notebook or directory") - } deleteReq.Path = args[0] } @@ -89,27 +99,47 @@ var deleteCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) } // start export command -var exportReq workspace.ExportRequest -func init() { - Cmd.AddCommand(exportCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var exportOverrides []func( + *cobra.Command, + *workspace.ExportRequest, +) + +func newExport() *cobra.Command { + cmd := &cobra.Command{} + + var exportReq workspace.ExportRequest + // TODO: short flags - exportCmd.Flags().Var(&exportReq.Format, "format", `This specifies the format of the exported file.`) + cmd.Flags().Var(&exportReq.Format, "format", `This specifies the format of the exported file.`) -} - -var exportCmd = &cobra.Command{ - Use: "export PATH", - Short: `Export a workspace object.`, - Long: `Export a workspace object. + cmd.Use = "export PATH" + cmd.Short = `Export a workspace object.` + cmd.Long = `Export a workspace object. Exports an object or the contents of an entire directory. @@ -118,31 +148,20 @@ var exportCmd = &cobra.Command{ If the exported data would exceed size limit, this call returns MAX_NOTEBOOK_SIZE_EXCEEDED. Currently, this API does not support exporting a - library.`, + library.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PATH argument specified. 
Loading names for Workspace drop-down." - names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The absolute path of the object or directory") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the absolute path of the object or directory") - } exportReq.Path = args[0] response, err := w.Workspace.Export(ctx, exportReq) @@ -150,36 +169,58 @@ var exportCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range exportOverrides { + fn(cmd, &exportReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newExport()) + }) } // start get-status command -var getStatusReq workspace.GetStatusRequest -func init() { - Cmd.AddCommand(getStatusCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getStatusOverrides []func( + *cobra.Command, + *workspace.GetStatusRequest, +) + +func newGetStatus() *cobra.Command { + cmd := &cobra.Command{} + + var getStatusReq workspace.GetStatusRequest + // TODO: short flags -} - -var getStatusCmd = &cobra.Command{ - Use: "get-status PATH", - Short: `Get status.`, - Long: `Get status. + cmd.Use = "get-status PATH" + cmd.Short = `Get status.` + cmd.Long = `Get status. Gets the status of an object or a directory. 
If path does not exist, this - call returns an error RESOURCE_DOES_NOT_EXIST.`, + call returns an error RESOURCE_DOES_NOT_EXIST.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -190,48 +231,70 @@ var getStatusCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getStatusOverrides { + fn(cmd, &getStatusReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetStatus()) + }) } // start import command -var importReq workspace.Import -var importJson flags.JsonFlag -func init() { - Cmd.AddCommand(importCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var importOverrides []func( + *cobra.Command, + *workspace.Import, +) + +func newImport() *cobra.Command { + cmd := &cobra.Command{} + + var importReq workspace.Import + var importJson flags.JsonFlag + // TODO: short flags - importCmd.Flags().Var(&importJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&importJson, "json", `either inline JSON string or @path/to/file.json with request body`) - importCmd.Flags().StringVar(&importReq.Content, "content", importReq.Content, `The base64-encoded content.`) - importCmd.Flags().Var(&importReq.Format, "format", `This specifies the format of the file to be imported.`) - importCmd.Flags().Var(&importReq.Language, "language", `The language of the object.`) - importCmd.Flags().BoolVar(&importReq.Overwrite, "overwrite", importReq.Overwrite, `The flag that specifies whether to overwrite existing object.`) + cmd.Flags().StringVar(&importReq.Content, "content", importReq.Content, `The base64-encoded content.`) + cmd.Flags().Var(&importReq.Format, "format", `This specifies the format of the file to be imported.`) + cmd.Flags().Var(&importReq.Language, "language", `The language of the object.`) + cmd.Flags().BoolVar(&importReq.Overwrite, "overwrite", importReq.Overwrite, `The flag that specifies whether to overwrite existing object.`) -} - -var importCmd = &cobra.Command{ - Use: "import PATH", - Short: `Import a workspace object.`, - Long: `Import a workspace object. + cmd.Use = "import PATH" + cmd.Short = `Import a workspace object.` + cmd.Long = `Import a workspace object. Imports a workspace object (for example, a notebook or file) or the contents of an entire directory. If path already exists and overwrite is set to false, this call returns an error RESOURCE_ALREADY_EXISTS. 
One can only - use DBC format to import a directory.`, + use DBC format to import a directory.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -249,39 +312,61 @@ var importCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range importOverrides { + fn(cmd, &importReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newImport()) + }) } // start list command -var listReq workspace.ListWorkspaceRequest -func init() { - Cmd.AddCommand(listCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *workspace.ListWorkspaceRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq workspace.ListWorkspaceRequest + // TODO: short flags - listCmd.Flags().IntVar(&listReq.NotebooksModifiedAfter, "notebooks-modified-after", listReq.NotebooksModifiedAfter, `UTC timestamp in milliseconds.`) + cmd.Flags().IntVar(&listReq.NotebooksModifiedAfter, "notebooks-modified-after", listReq.NotebooksModifiedAfter, `UTC timestamp in milliseconds.`) -} - -var listCmd = &cobra.Command{ - Use: "list PATH", - Short: `List contents.`, - Long: `List contents. + cmd.Use = "list PATH" + cmd.Short = `List contents.` + cmd.Long = `List contents. Lists the contents of a directory, or the object if it is not a directory. If the input path does not exist, this call returns an error - RESOURCE_DOES_NOT_EXIST.`, + RESOURCE_DOES_NOT_EXIST.` - Annotations: map[string]string{}, - Args: func(cmd *cobra.Command, args []string) error { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { check := cobra.ExactArgs(1) return check(cmd, args) - }, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -292,38 +377,67 @@ var listCmd = &cobra.Command{ return err } return cmdio.Render(ctx, response) - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) } // start mkdirs command -var mkdirsReq workspace.Mkdirs -var mkdirsJson flags.JsonFlag -func init() { - Cmd.AddCommand(mkdirsCmd) +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var mkdirsOverrides []func( + *cobra.Command, + *workspace.Mkdirs, +) + +func newMkdirs() *cobra.Command { + cmd := &cobra.Command{} + + var mkdirsReq workspace.Mkdirs + var mkdirsJson flags.JsonFlag + // TODO: short flags - mkdirsCmd.Flags().Var(&mkdirsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().Var(&mkdirsJson, "json", `either inline JSON string or @path/to/file.json with request body`) -} - -var mkdirsCmd = &cobra.Command{ - Use: "mkdirs PATH", - Short: `Create a directory.`, - Long: `Create a directory. + cmd.Use = "mkdirs PATH" + cmd.Short = `Create a directory.` + cmd.Long = `Create a directory. Creates the specified directory (and necessary parent directories if they do not exist). If there is an object (not a directory) at any prefix of the input path, this call returns an error RESOURCE_ALREADY_EXISTS. 
Note that if this operation fails it may have succeeded in creating some of - the necessary parent directories.`, + the necessary parent directories.` - Annotations: map[string]string{}, - PreRunE: root.MustWorkspaceClient, - RunE: func(cmd *cobra.Command, args []string) (err error) { + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) @@ -333,23 +447,6 @@ var mkdirsCmd = &cobra.Command{ return err } } else { - if len(args) == 0 { - promptSpinner := cmdio.Spinner(ctx) - promptSpinner <- "No PATH argument specified. Loading names for Workspace drop-down." - names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) - close(promptSpinner) - if err != nil { - return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. Original error: %w", err) - } - id, err := cmdio.Select(ctx, names, "The absolute path of the directory") - if err != nil { - return err - } - args = append(args, id) - } - if len(args) != 1 { - return fmt.Errorf("expected to have the absolute path of the directory") - } mkdirsReq.Path = args[0] } @@ -358,10 +455,24 @@ var mkdirsCmd = &cobra.Command{ return err } return nil - }, + } + // Disable completions since they are not applicable. // Can be overridden by manual implementation in `override.go`. - ValidArgsFunction: cobra.NoFileCompletions, + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range mkdirsOverrides { + fn(cmd, &mkdirsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newMkdirs()) + }) } // end service Workspace diff --git a/internal/helpers.go b/internal/helpers.go index 449b6d9ab..194f0eee4 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -15,7 +15,7 @@ import ( "testing" "time" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" _ "github.com/databricks/cli/cmd/version" "github.com/spf13/cobra" "github.com/spf13/pflag" @@ -117,7 +117,7 @@ func (t *cobraTestRunner) RunBackground() { var stdoutW, stderrW io.WriteCloser stdoutR, stdoutW = io.Pipe() stderrR, stderrW = io.Pipe() - root := root.RootCmd + root := cmd.New() root.SetOut(stdoutW) root.SetErr(stderrW) root.SetArgs(t.args) diff --git a/main.go b/main.go index 959c9b295..414e42d0b 100644 --- a/main.go +++ b/main.go @@ -1,6 +1,7 @@ package main import ( + "github.com/databricks/cli/cmd" _ "github.com/databricks/cli/cmd/account" _ "github.com/databricks/cli/cmd/api" _ "github.com/databricks/cli/cmd/auth" @@ -15,5 +16,5 @@ import ( ) func main() { - root.Execute() + root.Execute(cmd.New()) } diff --git a/main_test.go b/main_test.go index 4c7a8ebc3..6a5d19448 100644 --- a/main_test.go +++ b/main_test.go @@ -3,7 +3,7 @@ package main import ( "testing" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) @@ -15,7 +15,7 @@ func TestCommandsDontUseUnderscoreInName(t *testing.T) { // This test lives in the main package because this is where // all commands are imported. 
// - queue := []*cobra.Command{root.RootCmd} + queue := []*cobra.Command{cmd.New()} for len(queue) > 0 { cmd := queue[0] assert.NotContains(t, cmd.Name(), "_") From 7a8d413a4b2e18e5077d54c3a705b0d3e57b0e91 Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 26 Jul 2023 10:36:49 +0200 Subject: [PATCH 015/139] Fix git clone integration test for non-existing repo --- internal/git_clone_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/git_clone_test.go b/internal/git_clone_test.go index b280ebc7d..6c76adeef 100644 --- a/internal/git_clone_test.go +++ b/internal/git_clone_test.go @@ -59,5 +59,5 @@ func TestAccGitCloneRepositoryDoesNotExist(t *testing.T) { tmpDir := t.TempDir() err := git.Clone(context.Background(), "doesnot-exist", "", tmpDir) - assert.Contains(t, err.Error(), `repository 'https://github.com/databricks/doesnot-exist/' not found`) + assert.Error(t, err) } From 1d21d3cfd32c2f4a74f7cac633f3342e81184fea Mon Sep 17 00:00:00 2001 From: Shreyas Goenka Date: Wed, 26 Jul 2023 10:37:17 +0200 Subject: [PATCH 016/139] Revert "Fix git clone integration test for non-existing repo" This reverts commit 7a8d413a4b2e18e5077d54c3a705b0d3e57b0e91. 
--- internal/git_clone_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/git_clone_test.go b/internal/git_clone_test.go index 6c76adeef..b280ebc7d 100644 --- a/internal/git_clone_test.go +++ b/internal/git_clone_test.go @@ -59,5 +59,5 @@ func TestAccGitCloneRepositoryDoesNotExist(t *testing.T) { tmpDir := t.TempDir() err := git.Clone(context.Background(), "doesnot-exist", "", tmpDir) - assert.Error(t, err) + assert.Contains(t, err.Error(), `repository 'https://github.com/databricks/doesnot-exist/' not found`) } From 5e0a09672206059da899f49327ec5f44304380db Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 26 Jul 2023 11:02:17 +0200 Subject: [PATCH 017/139] Fixed python wheel test (#608) ## Changes Fixed python wheel test ## Tests --- .github/workflows/push.yml | 4 +++- bundle/tests/bundle/python_wheel/bundle.yml | 2 +- bundle/tests/bundle/wheel_test.go | 10 ++++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 7e1bc2437..2406ed71f 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -39,7 +39,9 @@ jobs: go install honnef.co/go/tools/cmd/staticcheck@latest - name: Pull external libraries - run: make vendor + run: | + make vendor + pip install wheel - name: Run tests run: make test diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/bundle/python_wheel/bundle.yml index 9c518589d..b3a793a63 100644 --- a/bundle/tests/bundle/python_wheel/bundle.yml +++ b/bundle/tests/bundle/python_wheel/bundle.yml @@ -5,7 +5,7 @@ artifacts: my_test_code: type: whl path: "./my_test_code" - build: "/usr/local/bin/python setup.py bdist_wheel" + build: "python setup.py bdist_wheel" resources: jobs: diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go index 9a6b2fd2c..5b786185b 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -2,18 +2,15 @@ package bundle import 
( "context" - "os" + "path/filepath" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/phases" - "github.com/databricks/cli/internal" "github.com/stretchr/testify/require" ) -func TestAccBundlePythonWheelBuild(t *testing.T) { - t.Log(internal.GetEnvOrSkipTest(t, "CLOUD_ENV")) - +func TestBundlePythonWheelBuild(t *testing.T) { b, err := bundle.Load("./python_wheel") require.NoError(t, err) @@ -21,6 +18,7 @@ func TestAccBundlePythonWheelBuild(t *testing.T) { err = m.Apply(context.Background(), b) require.NoError(t, err) - _, err = os.Stat("./python_wheel/my_test_code/dist/my_test_code-0.0.1-py2-none-any.whl") + matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") require.NoError(t, err) + require.Equal(t, 1, len(matches)) } From f0ad28ab62adb71d2442ae327f3c55c77c5c0686 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 26 Jul 2023 11:37:18 +0200 Subject: [PATCH 018/139] Fix tests under ./cmd/configure if DATABRICKS_TOKEN is set (#605) ## Changes The assertions would fail because `DATABRICKS_TOKEN` overrides a token set in the profile. ## Tests Tests now pass if `DATABRICKS_TOKEN` is set. 
--- cmd/configure/configure_test.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/cmd/configure/configure_test.go b/cmd/configure/configure_test.go index 0dbf114d6..7b627ba98 100644 --- a/cmd/configure/configure_test.go +++ b/cmd/configure/configure_test.go @@ -14,8 +14,9 @@ import ( func assertKeyValueInSection(t *testing.T, section *ini.Section, keyName, expectedValue string) { key, err := section.GetKey(keyName) - assert.NoError(t, err) - assert.Equal(t, key.Value(), expectedValue) + if assert.NoError(t, err) { + assert.Equal(t, expectedValue, key.Value()) + } } func setup(t *testing.T) string { @@ -26,6 +27,7 @@ func setup(t *testing.T) string { } t.Setenv(homeEnvVar, tempHomeDir) t.Setenv("DATABRICKS_CONFIG_FILE", "") + t.Setenv("DATABRICKS_TOKEN", "") return tempHomeDir } From cfff140815da6b8c8194100d0ae861891573c2fe Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 26 Jul 2023 12:07:26 +0200 Subject: [PATCH 019/139] Auto detect Python wheel packages and infer build command (#603) --- .github/workflows/push.yml | 2 +- bundle/artifacts/autodetect.go | 32 ++++++++ bundle/artifacts/build.go | 13 ++-- bundle/artifacts/infer.go | 60 +++++++++++++++ bundle/artifacts/whl/autodetect.go | 74 +++++++++++++++++++ bundle/artifacts/whl/autodetect_test.go | 22 ++++++ bundle/artifacts/whl/build.go | 6 -- bundle/artifacts/whl/infer.go | 34 +++++++++ bundle/artifacts/whl/testdata/setup.py | 15 ++++ .../artifacts/whl/testdata/setup_incorrect.py | 14 ++++ .../artifacts/whl/testdata/setup_minimal.py | 3 + bundle/phases/build.go | 2 + bundle/tests/bundle/python_wheel/bundle.yml | 2 +- .../python_wheel_no_artifact/.gitignore | 3 + .../python_wheel_no_artifact/bundle.yml | 13 ++++ .../my_test_code/__init__.py | 2 + .../my_test_code/__main__.py | 16 ++++ .../bundle/python_wheel_no_artifact/setup.py | 15 ++++ bundle/tests/bundle/wheel_test.go | 13 ++++ python/runner.go | 4 +- python/runner_test.go | 6 +- 21 files changed, 332 insertions(+), 19 
deletions(-) create mode 100644 bundle/artifacts/autodetect.go create mode 100644 bundle/artifacts/infer.go create mode 100644 bundle/artifacts/whl/autodetect.go create mode 100644 bundle/artifacts/whl/autodetect_test.go create mode 100644 bundle/artifacts/whl/infer.go create mode 100644 bundle/artifacts/whl/testdata/setup.py create mode 100644 bundle/artifacts/whl/testdata/setup_incorrect.py create mode 100644 bundle/artifacts/whl/testdata/setup_minimal.py create mode 100644 bundle/tests/bundle/python_wheel_no_artifact/.gitignore create mode 100644 bundle/tests/bundle/python_wheel_no_artifact/bundle.yml create mode 100644 bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py create mode 100644 bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py create mode 100644 bundle/tests/bundle/python_wheel_no_artifact/setup.py diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 2406ed71f..0f9b60161 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -41,7 +41,7 @@ jobs: - name: Pull external libraries run: | make vendor - pip install wheel + pip3 install wheel - name: Run tests run: make test diff --git a/bundle/artifacts/autodetect.go b/bundle/artifacts/autodetect.go new file mode 100644 index 000000000..fa8126f97 --- /dev/null +++ b/bundle/artifacts/autodetect.go @@ -0,0 +1,32 @@ +package artifacts + +import ( + "context" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/libs/log" +) + +func DetectPackages() bundle.Mutator { + return &autodetect{} +} + +type autodetect struct { +} + +func (m *autodetect) Name() string { + return "artifacts.DetectPackages" +} + +func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) error { + // If artifacts section explicitly defined, do not try to auto detect packages + if b.Config.Artifacts != nil { + log.Debugf(ctx, "artifacts block is defined, skipping auto-detecting") + return 
nil + } + + return bundle.Apply(ctx, b, bundle.Seq( + whl.DetectPackage(), + )) +} diff --git a/bundle/artifacts/build.go b/bundle/artifacts/build.go index 7721635a8..6b1aac822 100644 --- a/bundle/artifacts/build.go +++ b/bundle/artifacts/build.go @@ -33,12 +33,13 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if len(artifact.Files) == 0 && artifact.BuildCommand == "" { - return fmt.Errorf("artifact %s misconfigured: 'files' or 'build' property is required", m.name) - } - - // If artifact file is explicitly defined, skip building the artifact - if len(artifact.Files) != 0 { + // Skip building if build command is not specified or infered + if artifact.BuildCommand == "" { + // If no build command was specified or infered and there is no + // artifact output files specified, artifact is misconfigured + if len(artifact.Files) == 0 { + return fmt.Errorf("misconfigured artifact: please specify 'build' or 'files' property") + } return nil } diff --git a/bundle/artifacts/infer.go b/bundle/artifacts/infer.go new file mode 100644 index 000000000..233fbda86 --- /dev/null +++ b/bundle/artifacts/infer.go @@ -0,0 +1,60 @@ +package artifacts + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/artifacts/whl" + "github.com/databricks/cli/bundle/config" +) + +var inferMutators map[config.ArtifactType]mutatorFactory = map[config.ArtifactType]mutatorFactory{ + config.ArtifactPythonWheel: whl.InferBuildCommand, +} + +func getInferMutator(t config.ArtifactType, name string) bundle.Mutator { + mutatorFactory, ok := inferMutators[t] + if !ok { + return nil + } + + return mutatorFactory(name) +} + +func InferMissingProperties() bundle.Mutator { + return &all{ + name: "infer", + fn: inferArtifactByName, + } +} + +func inferArtifactByName(name string) (bundle.Mutator, error) { + return &infer{name}, nil +} + +type infer struct { + name string +} + +func 
(m *infer) Name() string { + return fmt.Sprintf("artifacts.Infer(%s)", m.name) +} + +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact, ok := b.Config.Artifacts[m.name] + if !ok { + return fmt.Errorf("artifact doesn't exist: %s", m.name) + } + + if artifact.BuildCommand != "" { + return nil + } + + inferMutator := getInferMutator(artifact.Type, m.name) + if inferMutator != nil { + return bundle.Apply(ctx, b, inferMutator) + } + + return nil +} diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go new file mode 100644 index 000000000..a801b48d7 --- /dev/null +++ b/bundle/artifacts/whl/autodetect.go @@ -0,0 +1,74 @@ +package whl + +import ( + "context" + "fmt" + "os" + "path/filepath" + "regexp" + "time" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" +) + +type detectPkg struct { +} + +func DetectPackage() bundle.Mutator { + return &detectPkg{} +} + +func (m *detectPkg) Name() string { + return "artifacts.whl.AutoDetect" +} + +func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { + cmdio.LogString(ctx, "artifacts.whl.AutoDetect: Detecting Python wheel project...") + + // checking if there is setup.py in the bundle root + setupPy := filepath.Join(b.Config.Path, "setup.py") + _, err := os.Stat(setupPy) + if err != nil { + cmdio.LogString(ctx, "artifacts.whl.AutoDetect: No Python wheel project found at bundle root folder") + return nil + } + + cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.AutoDetect: Found Python wheel project at %s", b.Config.Path)) + module := extractModuleName(setupPy) + + if b.Config.Artifacts == nil { + b.Config.Artifacts = make(map[string]*config.Artifact) + } + + pkgPath, err := filepath.Abs(b.Config.Path) + if err != nil { + return err + } + b.Config.Artifacts[module] = &config.Artifact{ + Path: pkgPath, + Type: config.ArtifactPythonWheel, + } + + return nil +} + +func 
extractModuleName(setupPy string) string { + bytes, err := os.ReadFile(setupPy) + if err != nil { + return randomName() + } + + content := string(bytes) + r := regexp.MustCompile(`name=['"](.*)['"]`) + matches := r.FindStringSubmatch(content) + if len(matches) == 0 { + return randomName() + } + return matches[1] +} + +func randomName() string { + return fmt.Sprintf("artifact%d", time.Now().Unix()) +} diff --git a/bundle/artifacts/whl/autodetect_test.go b/bundle/artifacts/whl/autodetect_test.go new file mode 100644 index 000000000..b53289b2a --- /dev/null +++ b/bundle/artifacts/whl/autodetect_test.go @@ -0,0 +1,22 @@ +package whl + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestExtractModuleName(t *testing.T) { + moduleName := extractModuleName("./testdata/setup.py") + assert.Equal(t, "my_test_code", moduleName) +} + +func TestExtractModuleNameMinimal(t *testing.T) { + moduleName := extractModuleName("./testdata/setup_minimal.py") + assert.Equal(t, "my_test_code", moduleName) +} + +func TestExtractModuleNameIncorrect(t *testing.T) { + moduleName := extractModuleName("./testdata/setup_incorrect.py") + assert.Contains(t, moduleName, "artifact") +} diff --git a/bundle/artifacts/whl/build.go b/bundle/artifacts/whl/build.go index 4ee47153b..4565a4c80 100644 --- a/bundle/artifacts/whl/build.go +++ b/bundle/artifacts/whl/build.go @@ -32,12 +32,6 @@ func (m *build) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - // TODO: If not set, BuildCommand should be infer prior to this - // via a mutator so that it can be observable. 
- if artifact.BuildCommand == "" { - return fmt.Errorf("artifacts.whl.Build(%s): missing build property for the artifact", m.name) - } - cmdio.LogString(ctx, fmt.Sprintf("artifacts.whl.Build(%s): Building...", m.name)) dir := artifact.Path diff --git a/bundle/artifacts/whl/infer.go b/bundle/artifacts/whl/infer.go new file mode 100644 index 000000000..518d926ca --- /dev/null +++ b/bundle/artifacts/whl/infer.go @@ -0,0 +1,34 @@ +package whl + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/python" +) + +type infer struct { + name string +} + +func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { + artifact := b.Config.Artifacts[m.name] + py, err := python.DetectExecutable(ctx) + if err != nil { + return err + } + artifact.BuildCommand = fmt.Sprintf("%s setup.py bdist_wheel", py) + + return nil +} + +func (m *infer) Name() string { + return fmt.Sprintf("artifacts.whl.Infer(%s)", m.name) +} + +func InferBuildCommand(name string) bundle.Mutator { + return &infer{ + name: name, + } +} diff --git a/bundle/artifacts/whl/testdata/setup.py b/bundle/artifacts/whl/testdata/setup.py new file mode 100644 index 000000000..7a1317b2f --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/artifacts/whl/testdata/setup_incorrect.py b/bundle/artifacts/whl/testdata/setup_incorrect.py new file mode 100644 index 000000000..c6aa17b2d --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup_incorrect.py @@ -0,0 +1,14 @@ +from setuptools import setup, find_packages + 
+import my_test_code + +setup( + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/artifacts/whl/testdata/setup_minimal.py b/bundle/artifacts/whl/testdata/setup_minimal.py new file mode 100644 index 000000000..3e81e7217 --- /dev/null +++ b/bundle/artifacts/whl/testdata/setup_minimal.py @@ -0,0 +1,3 @@ +from setuptools import setup + +setup(name="my_test_code") diff --git a/bundle/phases/build.go b/bundle/phases/build.go index 9249c32c0..fe90c3691 100644 --- a/bundle/phases/build.go +++ b/bundle/phases/build.go @@ -11,6 +11,8 @@ func Build() bundle.Mutator { return newPhase( "build", []bundle.Mutator{ + artifacts.DetectPackages(), + artifacts.InferMissingProperties(), artifacts.BuildAll(), interpolation.Interpolate( interpolation.IncludeLookupsInPath("artifacts"), diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/bundle/python_wheel/bundle.yml index b3a793a63..4e272c9f5 100644 --- a/bundle/tests/bundle/python_wheel/bundle.yml +++ b/bundle/tests/bundle/python_wheel/bundle.yml @@ -5,7 +5,7 @@ artifacts: my_test_code: type: whl path: "./my_test_code" - build: "python setup.py bdist_wheel" + build: "python3 setup.py bdist_wheel" resources: jobs: diff --git a/bundle/tests/bundle/python_wheel_no_artifact/.gitignore b/bundle/tests/bundle/python_wheel_no_artifact/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git a/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml new file mode 100644 index 000000000..109086729 --- /dev/null +++ 
b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml @@ -0,0 +1,13 @@ +bundle: + name: python-wheel + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" diff --git a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git a/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py new file mode 100644 index 000000000..73d045afb --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/my_test_code/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print('Hello from my func') + print('Got arguments:') + print(sys.argv) + + +if __name__ == '__main__': + main() diff --git a/bundle/tests/bundle/python_wheel_no_artifact/setup.py b/bundle/tests/bundle/python_wheel_no_artifact/setup.py new file mode 100644 index 000000000..7a1317b2f --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact/setup.py @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import my_test_code + +setup( + name="my_test_code", + version=my_test_code.__version__, + author=my_test_code.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my test wheel", + packages=find_packages(include=["my_test_code"]), + entry_points={"group_1": "run=my_test_code.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/bundle/tests/bundle/wheel_test.go 
b/bundle/tests/bundle/wheel_test.go index 5b786185b..2290e47c6 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -22,3 +22,16 @@ func TestBundlePythonWheelBuild(t *testing.T) { require.NoError(t, err) require.Equal(t, 1, len(matches)) } + +func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { + b, err := bundle.Load("./python_wheel_no_artifact") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(context.Background(), b) + require.NoError(t, err) + + matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") + require.NoError(t, err) + require.Equal(t, 1, len(matches)) +} diff --git a/python/runner.go b/python/runner.go index 6145da277..b2946b297 100644 --- a/python/runner.go +++ b/python/runner.go @@ -15,7 +15,7 @@ func PyInline(ctx context.Context, inlinePy string) (string, error) { } func Py(ctx context.Context, script string, args ...string) (string, error) { - py, err := detectExecutable(ctx) + py, err := DetectExecutable(ctx) if err != nil { return "", err } @@ -70,7 +70,7 @@ func detectVirtualEnv() (string, error) { var pyExec string -func detectExecutable(ctx context.Context) (string, error) { +func DetectExecutable(ctx context.Context) (string, error) { if pyExec != "" { return pyExec, nil } diff --git a/python/runner_test.go b/python/runner_test.go index 321a1b7dc..b43d218ce 100644 --- a/python/runner_test.go +++ b/python/runner_test.go @@ -25,14 +25,14 @@ func TestExecAndPassError(t *testing.T) { func TestDetectPython(t *testing.T) { pyExec = "" - py, err := detectExecutable(context.Background()) + py, err := DetectExecutable(context.Background()) assert.NoError(t, err) assert.Contains(t, py, "python3") } func TestDetectPythonCache(t *testing.T) { pyExec = "abc" - py, err := detectExecutable(context.Background()) + py, err := DetectExecutable(context.Background()) assert.NoError(t, err) assert.Equal(t, "abc", py) pyExec = "" @@ -82,7 +82,7 @@ func TestPyInline(t *testing.T) 
{ } func TestPyInlineStderr(t *testing.T) { - detectExecutable(context.Background()) + DetectExecutable(context.Background()) inline := "import sys; sys.stderr.write('___msg___'); sys.exit(1)" _, err := PyInline(context.Background(), inline) assert.EqualError(t, err, "___msg___") From ec892aa11c7c5f9a98220d95fa448a4d1433cc23 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 26 Jul 2023 13:17:09 +0200 Subject: [PATCH 020/139] Remove dependency on global state for the root command (#606) ## Changes This change is another step towards a CLI without globals. Also see #595. The flags for the root command are now encapsulated in struct types. ## Tests Unit tests pass. --- cmd/bundle/run.go | 4 +-- cmd/root/auth.go | 6 ++-- cmd/root/bundle.go | 6 ++-- cmd/root/bundle_test.go | 51 ++++++++++++++++++------------ cmd/root/io.go | 29 +++++++++++------ cmd/root/logger.go | 54 +++++++++++++++++++------------- cmd/root/progress_logger.go | 37 +++++++++++++++------- cmd/root/progress_logger_test.go | 36 +++++++++++++++++---- cmd/root/root.go | 52 +++++++++++++++++------------- 9 files changed, 177 insertions(+), 98 deletions(-) diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 9ca8fe456..439e3522e 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -47,7 +47,7 @@ var runCmd = &cobra.Command{ return err } if output != nil { - switch root.OutputType() { + switch root.OutputType(cmd) { case flags.OutputText: resultString, err := output.String() if err != nil { @@ -61,7 +61,7 @@ var runCmd = &cobra.Command{ } cmd.OutOrStdout().Write(b) default: - return fmt.Errorf("unknown output type %s", root.OutputType()) + return fmt.Errorf("unknown output type %s", root.OutputType(cmd)) } } return nil diff --git a/cmd/root/auth.go b/cmd/root/auth.go index ae7f73968..c13f74637 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -21,9 +21,9 @@ var workspaceClient int var accountClient int var currentUser int -func init() { - RootCmd.PersistentFlags().StringP("profile", 
"p", "", "~/.databrickscfg profile") - RootCmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion) +func initProfileFlag(cmd *cobra.Command) { + cmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile") + cmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion) } func MustAccountClient(cmd *cobra.Command, args []string) error { diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 8eab7c2c7..8a3b5977f 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -118,8 +118,8 @@ func environmentCompletion(cmd *cobra.Command, args []string, toComplete string) return maps.Keys(b.Config.Environments), cobra.ShellCompDirectiveDefault } -func init() { +func initEnvironmentFlag(cmd *cobra.Command) { // To operate in the context of a bundle, all commands must take an "environment" parameter. - RootCmd.PersistentFlags().StringP("environment", "e", "", "bundle environment to use (if applicable)") - RootCmd.RegisterFlagCompletionFunc("environment", environmentCompletion) + cmd.PersistentFlags().StringP("environment", "e", "", "bundle environment to use (if applicable)") + cmd.RegisterFlagCompletionFunc("environment", environmentCompletion) } diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 8dc771bd4..4b44e019b 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) @@ -27,15 +28,18 @@ func setupDatabricksCfg(t *testing.T) { t.Setenv(homeEnvVar, tempHomeDir) } -func setup(t *testing.T, host string) *bundle.Bundle { +func emptyCommand(t *testing.T) *cobra.Command { + ctx := context.Background() + cmd := &cobra.Command{} + cmd.SetContext(ctx) + initProfileFlag(cmd) + return cmd +} + +func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { setupDatabricksCfg(t) - ctx := context.Background() - 
RootCmd.SetContext(ctx) - _, err := initializeLogger(ctx) - assert.NoError(t, err) - - err = configureBundle(RootCmd, []string{"validate"}, func() (*bundle.Bundle, error) { + err := configureBundle(cmd, []string{"validate"}, func() (*bundle.Bundle, error) { return &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -48,46 +52,50 @@ func setup(t *testing.T, host string) *bundle.Bundle { }, nil }) assert.NoError(t, err) - - return bundle.Get(RootCmd.Context()) + return bundle.Get(cmd.Context()) } func TestBundleConfigureDefault(t *testing.T) { - b := setup(t, "https://x.com") + cmd := emptyCommand(t) + b := setup(t, cmd, "https://x.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) } func TestBundleConfigureWithMultipleMatches(t *testing.T) { - b := setup(t, "https://a.com") + cmd := emptyCommand(t) + b := setup(t, cmd, "https://a.com") assert.Panics(t, func() { b.WorkspaceClient() }) } func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { - RootCmd.Flag("profile").Value.Set("NOEXIST") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("NOEXIST") - b := setup(t, "https://x.com") + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "no matching config profiles found", func() { b.WorkspaceClient() }) } func TestBundleConfigureWithMismatchedProfile(t *testing.T) { - RootCmd.Flag("profile").Value.Set("PROFILE-1") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") - b := setup(t, "https://x.com") + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { b.WorkspaceClient() }) } func TestBundleConfigureWithCorrectProfile(t *testing.T) { - RootCmd.Flag("profile").Value.Set("PROFILE-1") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") - b := setup(t, "https://a.com") + b := setup(t, cmd, "https://a.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) @@ -99,7 +107,8 @@ 
func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) { t.Setenv("DATABRICKS_CONFIG_PROFILE", "") }) - b := setup(t, "https://x.com") + cmd := emptyCommand(t) + b := setup(t, cmd, "https://x.com") assert.PanicsWithError(t, "config host mismatch: profile uses host https://a.com, but CLI configured to use https://x.com", func() { b.WorkspaceClient() }) @@ -110,9 +119,11 @@ func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { t.Cleanup(func() { t.Setenv("DATABRICKS_CONFIG_PROFILE", "") }) - RootCmd.Flag("profile").Value.Set("PROFILE-1") - b := setup(t, "https://a.com") + cmd := emptyCommand(t) + cmd.Flag("profile").Value.Set("PROFILE-1") + + b := setup(t, cmd, "https://a.com") assert.NotPanics(t, func() { b.WorkspaceClient() }) diff --git a/cmd/root/io.go b/cmd/root/io.go index 93830c804..380c01b18 100644 --- a/cmd/root/io.go +++ b/cmd/root/io.go @@ -10,32 +10,43 @@ import ( const envOutputFormat = "DATABRICKS_OUTPUT_FORMAT" -var outputType flags.Output = flags.OutputText +type outputFlag struct { + output flags.Output +} + +func initOutputFlag(cmd *cobra.Command) *outputFlag { + f := outputFlag{ + output: flags.OutputText, + } -func init() { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
if v, ok := os.LookupEnv(envOutputFormat); ok { - outputType.Set(v) + f.output.Set(v) } - RootCmd.PersistentFlags().VarP(&outputType, "output", "o", "output type: text or json") + cmd.PersistentFlags().VarP(&f.output, "output", "o", "output type: text or json") + return &f } -func OutputType() flags.Output { - return outputType +func OutputType(cmd *cobra.Command) flags.Output { + f, ok := cmd.Flag("output").Value.(*flags.Output) + if !ok { + panic("output flag not defined") + } + + return *f } -func initializeIO(cmd *cobra.Command) error { +func (f *outputFlag) initializeIO(cmd *cobra.Command) error { var template string if cmd.Annotations != nil { // rely on zeroval being an empty string template = cmd.Annotations["template"] } - cmdIO := cmdio.NewIO(outputType, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), template) + cmdIO := cmdio.NewIO(f.output, cmd.InOrStdin(), cmd.OutOrStdout(), cmd.ErrOrStderr(), template) ctx := cmdio.InContext(cmd.Context(), cmdIO) cmd.SetContext(ctx) - return nil } diff --git a/cmd/root/logger.go b/cmd/root/logger.go index 89d707604..87f695503 100644 --- a/cmd/root/logger.go +++ b/cmd/root/logger.go @@ -10,6 +10,7 @@ import ( "github.com/databricks/cli/libs/flags" "github.com/databricks/cli/libs/log" "github.com/fatih/color" + "github.com/spf13/cobra" "golang.org/x/exp/slog" ) @@ -66,12 +67,18 @@ func (l *friendlyHandler) Handle(ctx context.Context, rec slog.Record) error { return err } -func makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { - switch logOutput { +type logFlags struct { + file flags.LogFileFlag + level flags.LogLevelFlag + output flags.Output +} + +func (f *logFlags) makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { + switch f.output { case flags.OutputJSON: - return opts.NewJSONHandler(logFile.Writer()), nil + return opts.NewJSONHandler(f.file.Writer()), nil case flags.OutputText: - w := logFile.Writer() + w := f.file.Writer() if cmdio.IsTTY(w) { return &friendlyHandler{ Handler: 
opts.NewTextHandler(w), @@ -81,13 +88,13 @@ func makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { return opts.NewTextHandler(w), nil default: - return nil, fmt.Errorf("invalid log output mode: %s", logOutput) + return nil, fmt.Errorf("invalid log output mode: %s", f.output) } } -func initializeLogger(ctx context.Context) (context.Context, error) { +func (f *logFlags) initializeContext(ctx context.Context) (context.Context, error) { opts := slog.HandlerOptions{} - opts.Level = logLevel.Level() + opts.Level = f.level.Level() opts.AddSource = true opts.ReplaceAttr = log.ReplaceAttrFunctions{ log.ReplaceLevelAttr, @@ -95,12 +102,12 @@ func initializeLogger(ctx context.Context) (context.Context, error) { }.ReplaceAttr // Open the underlying log file if the user configured an actual file to log to. - err := logFile.Open() + err := f.file.Open() if err != nil { return nil, err } - handler, err := makeLogHandler(opts) + handler, err := f.makeLogHandler(opts) if err != nil { return nil, err } @@ -109,27 +116,30 @@ func initializeLogger(ctx context.Context) (context.Context, error) { return log.NewContext(ctx, slog.Default()), nil } -var logFile = flags.NewLogFileFlag() -var logLevel = flags.NewLogLevelFlag() -var logOutput = flags.OutputText +func initLogFlags(cmd *cobra.Command) *logFlags { + f := logFlags{ + file: flags.NewLogFileFlag(), + level: flags.NewLogLevelFlag(), + output: flags.OutputText, + } -func init() { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
if v, ok := os.LookupEnv(envLogFile); ok { - logFile.Set(v) + f.file.Set(v) } if v, ok := os.LookupEnv(envLogLevel); ok { - logLevel.Set(v) + f.level.Set(v) } if v, ok := os.LookupEnv(envLogFormat); ok { - logOutput.Set(v) + f.output.Set(v) } - RootCmd.PersistentFlags().Var(&logFile, "log-file", "file to write logs to") - RootCmd.PersistentFlags().Var(&logLevel, "log-level", "log level") - RootCmd.PersistentFlags().Var(&logOutput, "log-format", "log output format (text or json)") - RootCmd.RegisterFlagCompletionFunc("log-file", logFile.Complete) - RootCmd.RegisterFlagCompletionFunc("log-level", logLevel.Complete) - RootCmd.RegisterFlagCompletionFunc("log-format", logOutput.Complete) + cmd.PersistentFlags().Var(&f.file, "log-file", "file to write logs to") + cmd.PersistentFlags().Var(&f.level, "log-level", "log level") + cmd.PersistentFlags().Var(&f.output, "log-format", "log output format (text or json)") + cmd.RegisterFlagCompletionFunc("log-file", f.file.Complete) + cmd.RegisterFlagCompletionFunc("log-level", f.level.Complete) + cmd.RegisterFlagCompletionFunc("log-format", f.output.Complete) + return &f } diff --git a/cmd/root/progress_logger.go b/cmd/root/progress_logger.go index fbd90ebb8..bdf52558b 100644 --- a/cmd/root/progress_logger.go +++ b/cmd/root/progress_logger.go @@ -7,42 +7,55 @@ import ( "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" "golang.org/x/term" ) const envProgressFormat = "DATABRICKS_CLI_PROGRESS_FORMAT" -func resolveModeDefault(format flags.ProgressLogFormat) flags.ProgressLogFormat { - if (logLevel.String() == "disabled" || logFile.String() != "stderr") && +type progressLoggerFlag struct { + flags.ProgressLogFormat + + log *logFlags +} + +func (f *progressLoggerFlag) resolveModeDefault(format flags.ProgressLogFormat) flags.ProgressLogFormat { + if (f.log.level.String() == "disabled" || f.log.file.String() != "stderr") && term.IsTerminal(int(os.Stderr.Fd())) { return 
flags.ModeInplace } return flags.ModeAppend } -func initializeProgressLogger(ctx context.Context) (context.Context, error) { - if logLevel.String() != "disabled" && logFile.String() == "stderr" && - progressFormat == flags.ModeInplace { +func (f *progressLoggerFlag) initializeContext(ctx context.Context) (context.Context, error) { + if f.log.level.String() != "disabled" && f.log.file.String() == "stderr" && + f.ProgressLogFormat == flags.ModeInplace { return nil, fmt.Errorf("inplace progress logging cannot be used when log-file is stderr") } - format := progressFormat + format := f.ProgressLogFormat if format == flags.ModeDefault { - format = resolveModeDefault(format) + format = f.resolveModeDefault(format) } progressLogger := cmdio.NewLogger(format) return cmdio.NewContext(ctx, progressLogger), nil } -var progressFormat = flags.NewProgressLogFormat() +func initProgressLoggerFlag(cmd *cobra.Command, logFlags *logFlags) *progressLoggerFlag { + f := progressLoggerFlag{ + ProgressLogFormat: flags.NewProgressLogFormat(), + + log: logFlags, + } -func init() { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
if v, ok := os.LookupEnv(envProgressFormat); ok { - progressFormat.Set(v) + f.Set(v) } - RootCmd.PersistentFlags().Var(&progressFormat, "progress-format", "format for progress logs (append, inplace, json)") - RootCmd.RegisterFlagCompletionFunc("progress-format", progressFormat.Complete) + + cmd.PersistentFlags().Var(&f.ProgressLogFormat, "progress-format", "format for progress logs (append, inplace, json)") + cmd.RegisterFlagCompletionFunc("progress-format", f.ProgressLogFormat.Complete) + return &f } diff --git a/cmd/root/progress_logger_test.go b/cmd/root/progress_logger_test.go index 30359257c..9dceee8d5 100644 --- a/cmd/root/progress_logger_test.go +++ b/cmd/root/progress_logger_test.go @@ -6,38 +6,62 @@ import ( "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" + "github.com/spf13/cobra" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) +type progressLoggerTest struct { + *cobra.Command + *logFlags + *progressLoggerFlag +} + +func initializeProgressLoggerTest(t *testing.T) ( + *progressLoggerTest, + *flags.LogLevelFlag, + *flags.LogFileFlag, + *flags.ProgressLogFormat, +) { + plt := &progressLoggerTest{ + Command: &cobra.Command{}, + } + plt.logFlags = initLogFlags(plt.Command) + plt.progressLoggerFlag = initProgressLoggerFlag(plt.Command, plt.logFlags) + return plt, &plt.logFlags.level, &plt.logFlags.file, &plt.progressLoggerFlag.ProgressLogFormat +} + func TestInitializeErrorOnIncompatibleConfig(t *testing.T) { + plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) logLevel.Set("info") logFile.Set("stderr") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.ErrorContains(t, err, "inplace progress logging cannot be used when log-file is stderr") } func TestNoErrorOnDisabledLogLevel(t *testing.T) { + plt, logLevel, logFile, progressFormat := 
initializeProgressLoggerTest(t) logLevel.Set("disabled") logFile.Set("stderr") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } func TestNoErrorOnNonStderrLogFile(t *testing.T) { + plt, logLevel, logFile, progressFormat := initializeProgressLoggerTest(t) logLevel.Set("info") logFile.Set("stdout") progressFormat.Set("inplace") - _, err := initializeProgressLogger(context.Background()) + _, err := plt.progressLoggerFlag.initializeContext(context.Background()) assert.NoError(t, err) } func TestDefaultLoggerModeResolution(t *testing.T) { - progressFormat = flags.NewProgressLogFormat() - require.Equal(t, progressFormat, flags.ModeDefault) - ctx, err := initializeProgressLogger(context.Background()) + plt, _, _, progressFormat := initializeProgressLoggerTest(t) + require.Equal(t, *progressFormat, flags.ModeDefault) + ctx, err := plt.progressLoggerFlag.initializeContext(context.Background()) require.NoError(t, err) logger, ok := cmdio.FromContext(ctx) assert.True(t, ok) diff --git a/cmd/root/root.go b/cmd/root/root.go index 663dd645f..45fc27f2c 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -13,26 +13,34 @@ import ( "golang.org/x/exp/slog" ) -// RootCmd represents the base command when called without any subcommands -var RootCmd = &cobra.Command{ - Use: "databricks", - Short: "Databricks CLI", - Version: build.GetInfo().Version, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "databricks", + Short: "Databricks CLI", + Version: build.GetInfo().Version, - // Cobra prints the usage string to stderr if a command returns an error. - // This usage string should only be displayed if an invalid combination of flags - // is specified and not when runtime errors occur (e.g. resource not found). - // The usage string is include in [flagErrorFunc] for flag errors only. 
- SilenceUsage: true, + // Cobra prints the usage string to stderr if a command returns an error. + // This usage string should only be displayed if an invalid combination of flags + // is specified and not when runtime errors occur (e.g. resource not found). + // The usage string is include in [flagErrorFunc] for flag errors only. + SilenceUsage: true, - // Silence error printing by cobra. Errors are printed through cmdio. - SilenceErrors: true, + // Silence error printing by cobra. Errors are printed through cmdio. + SilenceErrors: true, + } - PersistentPreRunE: func(cmd *cobra.Command, args []string) error { + // Initialize flags + logFlags := initLogFlags(cmd) + progressLoggerFlag := initProgressLoggerFlag(cmd, logFlags) + outputFlag := initOutputFlag(cmd) + initProfileFlag(cmd) + initEnvironmentFlag(cmd) + + cmd.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() // Configure default logger. - ctx, err := initializeLogger(ctx) + ctx, err := logFlags.initializeContext(ctx) if err != nil { return err } @@ -43,7 +51,7 @@ var RootCmd = &cobra.Command{ slog.String("args", strings.Join(os.Args, ", "))) // Configure progress logger - ctx, err = initializeProgressLogger(ctx) + ctx, err = progressLoggerFlag.initializeContext(ctx) if err != nil { return err } @@ -51,7 +59,7 @@ var RootCmd = &cobra.Command{ cmd.SetContext(ctx) // Configure command IO - err = initializeIO(cmd) + err = outputFlag.initializeIO(cmd) if err != nil { return err } @@ -63,7 +71,11 @@ var RootCmd = &cobra.Command{ ctx = withUpstreamInUserAgent(ctx) cmd.SetContext(ctx) return nil - }, + } + + cmd.SetFlagErrorFunc(flagErrorFunc) + cmd.SetVersionTemplate("Databricks CLI v{{.Version}}\n") + return cmd } // Wrap flag errors to include the usage string. 
@@ -104,7 +116,5 @@ func Execute(cmd *cobra.Command) { } } -func init() { - RootCmd.SetFlagErrorFunc(flagErrorFunc) - RootCmd.SetVersionTemplate("Databricks CLI v{{.Version}}\n") -} +// Keep a global copy until all commands can be initialized. +var RootCmd = New() From 8cdf7284f857e1e0a2d5f0c7289ca8bdd233938d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 26 Jul 2023 14:48:05 +0200 Subject: [PATCH 021/139] Add merge_group trigger for build (#612) ## Changes Also see [GitHub docs](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/configuring-pull-request-merges/managing-a-merge-queue#triggering-merge-group-checks-with-github-actions). ## Tests n/a --- .github/workflows/push.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 0f9b60161..1dcf3eaf3 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -5,6 +5,8 @@ on: types: [opened, synchronize] push: branches: [main] + merge_group: + types: [checks_requested] jobs: tests: From 12bba177438bf225b92b12d204c171d0c83a21c3 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 26 Jul 2023 14:58:52 +0200 Subject: [PATCH 022/139] Added support for build command chaining and error on missing wheel (#607) ## Changes Added support for build command chaining and error on missing wheel --- bundle/config/artifact.go | 18 ++++++++++++++---- bundle/libraries/libraries.go | 8 ++++++++ 2 files changed, 22 insertions(+), 4 deletions(-) diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index 1ac371e93..60331eb13 100644 --- a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -1,6 +1,7 @@ package config import ( + "bytes" "context" "fmt" "os/exec" @@ -40,10 +41,19 @@ func (a *Artifact) Build(ctx context.Context) ([]byte, error) { return nil, fmt.Errorf("no build property defined") } - buildParts := strings.Split(a.BuildCommand, " ") - cmd := exec.CommandContext(ctx, 
buildParts[0], buildParts[1:]...) - cmd.Dir = a.Path - return cmd.CombinedOutput() + out := make([][]byte, 0) + commands := strings.Split(a.BuildCommand, " && ") + for _, command := range commands { + buildParts := strings.Split(command, " ") + cmd := exec.CommandContext(ctx, buildParts[0], buildParts[1:]...) + cmd.Dir = a.Path + res, err := cmd.CombinedOutput() + if err != nil { + return res, err + } + out = append(out, res) + } + return bytes.Join(out, []byte{}), nil } func (a *Artifact) NormalisePaths() { diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index ff86a34b5..f7a2574ad 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -68,6 +68,10 @@ func findArtifactsAndMarkForUpload(ctx context.Context, lib *compute.Library, b return err } + if len(matches) == 0 && isLocalLibrary(lib) { + return fmt.Errorf("no library found for %s", libPath(lib)) + } + for _, match := range matches { af, err := findArtifactFileByLocalPath(match, b) if err != nil { @@ -105,3 +109,7 @@ func libPath(library *compute.Library) string { return "" } + +func isLocalLibrary(library *compute.Library) bool { + return libPath(library) != "" +} From 8ffff241fe30b3523aadf97f5664dd24f4457f63 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Wed, 26 Jul 2023 15:03:10 +0200 Subject: [PATCH 023/139] Add TestAcc prefix to filer test and fix any failing tests (#611) ## Changes Fs integration tests were not running on our nightlies before because the nightlies only run tests with the `TestAcc` prefix. A couple of them were also broken! This PR fixes the tests and adds the prefix to all fs integration tests. As a followup we can automate the check for this prefix. 
## Tested Fs tests are green and pass on both azure and aws --- internal/fs_cat_test.go | 10 +++++----- internal/fs_ls_test.go | 14 +++++++------- internal/{mkdir_test.go => fs_mkdir_test.go} | 13 ++++++++----- internal/{rm_test.go => fs_rm_test.go} | 20 ++++++++++---------- 4 files changed, 30 insertions(+), 27 deletions(-) rename internal/{mkdir_test.go => fs_mkdir_test.go} (85%) rename internal/{rm_test.go => fs_rm_test.go} (86%) diff --git a/internal/fs_cat_test.go b/internal/fs_cat_test.go index 5d6952f4f..f3c8e59cd 100644 --- a/internal/fs_cat_test.go +++ b/internal/fs_cat_test.go @@ -13,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFsCatForDbfs(t *testing.T) { +func TestAccFsCatForDbfs(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -33,21 +33,21 @@ func TestFsCatForDbfs(t *testing.T) { assert.Equal(t, "abc", stdout.String()) } -func TestFsCatForDbfsOnNonExistentFile(t *testing.T) { +func TestAccFsCatForDbfsOnNonExistentFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "cat", "dbfs:/non-existent-file") assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsCatForDbfsInvalidScheme(t *testing.T) { +func TestAccFsCatForDbfsInvalidScheme(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "cat", "dab:/non-existent-file") - assert.ErrorContains(t, err, "expected dbfs path (with the dbfs:/ prefix): dab:/non-existent-file") + assert.ErrorContains(t, err, "invalid scheme: dab") } -func TestFsCatDoesNotSupportOutputModeJson(t *testing.T) { +func TestAccFsCatDoesNotSupportOutputModeJson(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() diff --git a/internal/fs_ls_test.go b/internal/fs_ls_test.go index 885fc31f9..d21817284 100644 --- a/internal/fs_ls_test.go +++ b/internal/fs_ls_test.go @@ -16,7 +16,7 @@ import ( "github.com/stretchr/testify/require" ) -func TestFsLsForDbfs(t 
*testing.T) { +func TestAccFsLsForDbfs(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -51,7 +51,7 @@ func TestFsLsForDbfs(t *testing.T) { assert.Equal(t, float64(3), parsedStdout[1]["size"]) } -func TestFsLsForDbfsWithAbsolutePaths(t *testing.T) { +func TestAccFsLsForDbfsWithAbsolutePaths(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -87,7 +87,7 @@ func TestFsLsForDbfsWithAbsolutePaths(t *testing.T) { assert.Equal(t, float64(3), parsedStdout[1]["size"]) } -func TestFsLsForDbfsOnFile(t *testing.T) { +func TestAccFsLsForDbfsOnFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -108,7 +108,7 @@ func TestFsLsForDbfsOnFile(t *testing.T) { assert.Regexp(t, regexp.MustCompile("not a directory: .*/a/hello.txt"), err.Error()) } -func TestFsLsForDbfsOnEmptyDir(t *testing.T) { +func TestAccFsLsForDbfsOnEmptyDir(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) w, err := databricks.NewWorkspaceClient() @@ -126,16 +126,16 @@ func TestFsLsForDbfsOnEmptyDir(t *testing.T) { assert.Equal(t, 0, len(parsedStdout)) } -func TestFsLsForDbfsForNonexistingDir(t *testing.T) { +func TestAccFsLsForDbfsForNonexistingDir(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "ls", "dbfs:/john-cena", "--output=json") assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsLsWithoutScheme(t *testing.T) { +func TestAccFsLsWithoutScheme(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) _, _, err := RequireErrorRun(t, "fs", "ls", "/ray-mysterio", "--output=json") - assert.ErrorContains(t, err, "expected dbfs path (with the dbfs:/ prefix): /ray-mysterio") + assert.ErrorIs(t, err, fs.ErrNotExist) } diff --git a/internal/mkdir_test.go b/internal/fs_mkdir_test.go similarity index 85% rename from internal/mkdir_test.go rename to internal/fs_mkdir_test.go index 7c96e63b1..137750e28 100644 --- a/internal/mkdir_test.go +++ 
b/internal/fs_mkdir_test.go @@ -3,6 +3,7 @@ package internal import ( "context" "path" + "regexp" "strings" "testing" @@ -12,7 +13,7 @@ import ( "github.com/stretchr/testify/require" ) -func TesFsMkdirCreatesDirectory(t *testing.T) { +func TestAccFsMkdirCreatesDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -36,7 +37,7 @@ func TesFsMkdirCreatesDirectory(t *testing.T) { assert.Equal(t, true, info.IsDir()) } -func TestFsMkdirCreatesMultipleDirectories(t *testing.T) { +func TestAccFsMkdirCreatesMultipleDirectories(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -72,7 +73,7 @@ func TestFsMkdirCreatesMultipleDirectories(t *testing.T) { assert.Equal(t, true, infoC.IsDir()) } -func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { +func TestAccFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -93,7 +94,7 @@ func TestFsMkdirWhenDirectoryAlreadyExists(t *testing.T) { assert.Equal(t, "", stdout.String()) } -func TestFsMkdirWhenFileExistsAtPath(t *testing.T) { +func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -110,5 +111,7 @@ func TestFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) - assert.ErrorContains(t, err, "Cannot create directory") + // Different backends return different errors (for example: file in s3 vs dbfs) + regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file`) + assert.Regexp(t, regex, err.Error()) } diff --git a/internal/rm_test.go b/internal/fs_rm_test.go similarity index 86% rename from internal/rm_test.go rename to internal/fs_rm_test.go index dd6a28593..1bee06c74 100644 --- a/internal/rm_test.go +++ b/internal/fs_rm_test.go @@ -13,7 +13,7 @@ import ( 
"github.com/stretchr/testify/require" ) -func TestFsRmForFile(t *testing.T) { +func TestAccFsRmForFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -45,7 +45,7 @@ func TestFsRmForFile(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForEmptyDirectory(t *testing.T) { +func TestAccFsRmForEmptyDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -77,7 +77,7 @@ func TestFsRmForEmptyDirectory(t *testing.T) { assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForNonEmptyDirectory(t *testing.T) { +func TestAccFsRmForNonEmptyDirectory(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() @@ -101,19 +101,19 @@ func TestFsRmForNonEmptyDirectory(t *testing.T) { // Run rm command _, _, err = RequireErrorRun(t, "fs", "rm", "dbfs:"+path.Join(tmpDir, "avacado")) - assert.ErrorContains(t, err, "Non-recursive delete of non-empty directory") + assert.ErrorIs(t, err, fs.ErrInvalid) + assert.ErrorContains(t, err, "directory not empty") } -func TestFsRmForNonExistentFile(t *testing.T) { +func TestAccFsRmForNonExistentFile(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) - // No error is returned on command run - stdout, stderr := RequireSuccessfulRun(t, "fs", "rm", "dbfs:/does-not-exist") - assert.Equal(t, "", stderr.String()) - assert.Equal(t, "", stdout.String()) + // Expect error if file does not exist + _, _, err := RequireErrorRun(t, "fs", "rm", "dbfs:/does-not-exist") + assert.ErrorIs(t, err, fs.ErrNotExist) } -func TestFsRmForNonEmptyDirectoryWithRecursiveFlag(t *testing.T) { +func TestAccFsRmForNonEmptyDirectoryWithRecursiveFlag(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) ctx := context.Background() From ed972f7ae02979b0b3822871ce10a4755a0502b6 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 27 Jul 2023 11:51:31 +0200 Subject: [PATCH 024/139] Add url parse 
helper function for templates (#600) ## Tests unit test --- libs/template/helpers.go | 5 +++++ libs/template/helpers_test.go | 15 +++++++++++++++ .../urlparse-function/template/hello.tmpl | 3 +++ 3 files changed, 23 insertions(+) create mode 100644 libs/template/testdata/urlparse-function/template/hello.tmpl diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 342b3811d..94737c1eb 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -2,6 +2,7 @@ package template import ( "fmt" + "net/url" "regexp" "text/template" ) @@ -18,6 +19,10 @@ var helperFuncs = template.FuncMap{ "fail": func(format string, args ...any) (any, error) { return nil, ErrFail{fmt.Sprintf(format, args...)} }, + // Alias for https://pkg.go.dev/net/url#Parse. Allows usage of all methods of url.URL + "url": func(rawUrl string) (*url.URL, error) { + return url.Parse(rawUrl) + }, // Alias for https://pkg.go.dev/regexp#Compile. Allows usage of all methods of regexp.Regexp "regexp": func(expr string) (*regexp.Regexp, error) { return regexp.Compile(expr) diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index 51c470efc..e904edecc 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -39,3 +39,18 @@ func TestTemplateRegexpCompileFunction(t *testing.T) { assert.Contains(t, content, "0:food") assert.Contains(t, content, "1:fool") } + +func TestTemplateUrlFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/urlparse-function/template", "./testdata/urlparse-function/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, "https://www.databricks.com", string(r.files[0].content)) +} diff --git a/libs/template/testdata/urlparse-function/template/hello.tmpl b/libs/template/testdata/urlparse-function/template/hello.tmpl new file mode 100644 index 000000000..c365284b2 --- 
/dev/null +++ b/libs/template/testdata/urlparse-function/template/hello.tmpl @@ -0,0 +1,3 @@ +{{ with url "https://www.databricks.com/a/b?o=123#my-fragment" -}} +{{- print .Scheme `://` .Host -}} +{{- end -}} From bee7a16cb075cfc245cefc95109602869efb77ed Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 27 Jul 2023 12:03:08 +0200 Subject: [PATCH 025/139] Remove dependency on global state for remaining commands (#613) ## Changes This removes the remaining dependency on global state and unblocks work to parallelize integration tests. As is, we can already uncomment an integration test that had to be skipped because of other tests tainting global state. This is no longer an issue. Also see #595 and #606. ## Tests * Unit and integration tests pass. * Manually confirmed the help output is the same. --- cmd/api/api.go | 32 +++++++-------- cmd/auth/auth.go | 27 +++++++------ cmd/auth/env.go | 27 ++++++------- cmd/auth/login.go | 36 ++++++++--------- cmd/auth/profiles.go | 39 ++++++++++--------- cmd/auth/token.go | 25 ++++++------ cmd/bundle/bundle.go | 23 +++++++++++ cmd/bundle/debug/debug.go | 19 --------- cmd/bundle/debug/whoami.go | 30 -------------- cmd/bundle/deploy.go | 28 ++++++------- cmd/bundle/destroy.go | 29 +++++++------- cmd/bundle/launch.go | 27 ++++++------- cmd/bundle/root.go | 23 ----------- cmd/bundle/run.go | 34 ++++++++-------- cmd/bundle/schema.go | 26 ++++++------- cmd/bundle/sync.go | 49 ++++++++++++----------- cmd/bundle/test.go | 25 ++++++------ cmd/bundle/validate.go | 19 ++++----- cmd/bundle/variables.go | 9 ++++- cmd/cmd.go | 52 +++++++++++++------------ cmd/configure/configure.go | 49 ++++++++++++----------- cmd/configure/configure_test.go | 19 +++++---- cmd/fs/cat.go | 22 +++++------ cmd/fs/cp.go | 69 ++++++++++++++++----------------- cmd/fs/fs.go | 24 +++++++----- cmd/fs/ls.go | 37 +++++++++--------- cmd/fs/mkdir.go | 28 ++++++------- cmd/fs/rm.go | 28 ++++++------- cmd/root/root.go | 3 -- cmd/sync/sync.go | 67 
+++++++++++++++++--------------- cmd/sync/sync_test.go | 13 ++++--- cmd/version/version.go | 23 ++++++----- internal/secrets_test.go | 7 ---- main.go | 10 ----- 34 files changed, 476 insertions(+), 502 deletions(-) create mode 100644 cmd/bundle/bundle.go delete mode 100644 cmd/bundle/debug/debug.go delete mode 100644 cmd/bundle/debug/whoami.go delete mode 100644 cmd/bundle/root.go diff --git a/cmd/api/api.go b/cmd/api/api.go index 563efa732..698781e63 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -5,7 +5,6 @@ import ( "net/http" "strings" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" "github.com/databricks/databricks-sdk-go/client" @@ -13,9 +12,22 @@ import ( "github.com/spf13/cobra" ) -var apiCmd = &cobra.Command{ - Use: "api", - Short: "Perform Databricks API call", +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "api", + Short: "Perform Databricks API call", + } + + cmd.AddCommand( + makeCommand(http.MethodGet), + makeCommand(http.MethodHead), + makeCommand(http.MethodPost), + makeCommand(http.MethodPut), + makeCommand(http.MethodPatch), + makeCommand(http.MethodDelete), + ) + + return cmd } func makeCommand(method string) *cobra.Command { @@ -59,15 +71,3 @@ func makeCommand(method string) *cobra.Command { command.Flags().Var(&payload, "json", `either inline JSON string or @path/to/file.json with request body`) return command } - -func init() { - apiCmd.AddCommand( - makeCommand(http.MethodGet), - makeCommand(http.MethodHead), - makeCommand(http.MethodPost), - makeCommand(http.MethodPut), - makeCommand(http.MethodPatch), - makeCommand(http.MethodDelete), - ) - root.RootCmd.AddCommand(apiCmd) -} diff --git a/cmd/auth/auth.go b/cmd/auth/auth.go index b7e8d2d78..e0c7c7c5b 100644 --- a/cmd/auth/auth.go +++ b/cmd/auth/auth.go @@ -3,18 +3,27 @@ package auth import ( "context" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/auth" 
"github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) -var authCmd = &cobra.Command{ - Use: "auth", - Short: "Authentication related commands", -} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "auth", + Short: "Authentication related commands", + } -var persistentAuth auth.PersistentAuth + var perisistentAuth auth.PersistentAuth + cmd.PersistentFlags().StringVar(&perisistentAuth.Host, "host", perisistentAuth.Host, "Databricks Host") + cmd.PersistentFlags().StringVar(&perisistentAuth.AccountID, "account-id", perisistentAuth.AccountID, "Databricks Account ID") + + cmd.AddCommand(newEnvCommand()) + cmd.AddCommand(newLoginCommand(&perisistentAuth)) + cmd.AddCommand(newProfilesCommand()) + cmd.AddCommand(newTokenCommand(&perisistentAuth)) + return cmd +} func promptForHost(ctx context.Context) (string, error) { prompt := cmdio.Prompt(ctx) @@ -41,9 +50,3 @@ func promptForAccountID(ctx context.Context) (string, error) { } return accountId, nil } - -func init() { - root.RootCmd.AddCommand(authCmd) - authCmd.PersistentFlags().StringVar(&persistentAuth.Host, "host", persistentAuth.Host, "Databricks Host") - authCmd.PersistentFlags().StringVar(&persistentAuth.AccountID, "account-id", persistentAuth.AccountID, "Databricks Account ID") -} diff --git a/cmd/auth/env.go b/cmd/auth/env.go index e288c576c..7bf3fd91f 100644 --- a/cmd/auth/env.go +++ b/cmd/auth/env.go @@ -89,10 +89,18 @@ func loadFromDatabricksCfg(cfg *config.Config) error { return nil } -var envCmd = &cobra.Command{ - Use: "env", - Short: "Get env", - RunE: func(cmd *cobra.Command, args []string) error { +func newEnvCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "env", + Short: "Get env", + } + + var host string + var profile string + cmd.Flags().StringVar(&host, "host", host, "Hostname to get auth env for") + cmd.Flags().StringVar(&profile, "profile", profile, "Profile to get auth env for") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { cfg := &config.Config{ 
Host: host, Profile: profile, @@ -130,14 +138,7 @@ var envCmd = &cobra.Command{ } cmd.OutOrStdout().Write(raw) return nil - }, -} + } -var host string -var profile string - -func init() { - authCmd.AddCommand(envCmd) - envCmd.Flags().StringVar(&host, "host", host, "Hostname to get auth env for") - envCmd.Flags().StringVar(&profile, "profile", profile, "Profile to get auth env for") + return cmd } diff --git a/cmd/auth/login.go b/cmd/auth/login.go index 37d44c084..fcb0e0ddb 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -14,10 +14,7 @@ import ( "github.com/spf13/cobra" ) -var loginTimeout time.Duration -var configureCluster bool - -func configureHost(ctx context.Context, args []string, argIndex int) error { +func configureHost(ctx context.Context, persistentAuth *auth.PersistentAuth, args []string, argIndex int) error { if len(args) > argIndex { persistentAuth.Host = args[argIndex] return nil @@ -31,13 +28,23 @@ func configureHost(ctx context.Context, args []string, argIndex int) error { return nil } -var loginCmd = &cobra.Command{ - Use: "login [HOST]", - Short: "Authenticate this machine", - RunE: func(cmd *cobra.Command, args []string) error { +func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { + cmd := &cobra.Command{ + Use: "login [HOST]", + Short: "Authenticate this machine", + } + + var loginTimeout time.Duration + var configureCluster bool + cmd.Flags().DurationVar(&loginTimeout, "timeout", auth.DefaultTimeout, + "Timeout for completing login challenge in the browser") + cmd.Flags().BoolVar(&configureCluster, "configure-cluster", false, + "Prompts to configure cluster") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() if persistentAuth.Host == "" { - configureHost(ctx, args, 0) + configureHost(ctx, persistentAuth, args, 0) } defer persistentAuth.Close() @@ -108,14 +115,7 @@ var loginCmd = &cobra.Command{ cmdio.LogString(ctx, fmt.Sprintf("Profile %s was successfully saved", profileName)) 
return nil - }, -} + } -func init() { - authCmd.AddCommand(loginCmd) - loginCmd.Flags().DurationVar(&loginTimeout, "timeout", auth.DefaultTimeout, - "Timeout for completing login challenge in the browser") - - loginCmd.Flags().BoolVar(&configureCluster, "configure-cluster", false, - "Prompts to configure cluster") + return cmd } diff --git a/cmd/auth/profiles.go b/cmd/auth/profiles.go index d3b167b77..2b08164f6 100644 --- a/cmd/auth/profiles.go +++ b/cmd/auth/profiles.go @@ -44,7 +44,7 @@ func (c *profileMetadata) IsEmpty() bool { return c.Host == "" && c.AccountID == "" } -func (c *profileMetadata) Load(ctx context.Context) { +func (c *profileMetadata) Load(ctx context.Context, skipValidate bool) { // TODO: disable config loaders other than configfile cfg := &config.Config{Profile: c.Name} _ = cfg.EnsureResolved() @@ -94,16 +94,22 @@ func (c *profileMetadata) Load(ctx context.Context) { c.Host = cfg.Host } -var profilesCmd = &cobra.Command{ - Use: "profiles", - Short: "Lists profiles from ~/.databrickscfg", - Annotations: map[string]string{ - "template": cmdio.Heredoc(` - {{header "Name"}} {{header "Host"}} {{header "Valid"}} - {{range .Profiles}}{{.Name | green}} {{.Host|cyan}} {{bool .Valid}} - {{end}}`), - }, - RunE: func(cmd *cobra.Command, args []string) error { +func newProfilesCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "profiles", + Short: "Lists profiles from ~/.databrickscfg", + Annotations: map[string]string{ + "template": cmdio.Heredoc(` + {{header "Name"}} {{header "Host"}} {{header "Valid"}} + {{range .Profiles}}{{.Name | green}} {{.Host|cyan}} {{bool .Valid}} + {{end}}`), + }, + } + + var skipValidate bool + cmd.Flags().BoolVar(&skipValidate, "skip-validate", false, "Whether to skip validating the profiles") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var profiles []*profileMetadata iniFile, err := getDatabricksCfg() if os.IsNotExist(err) { @@ -126,7 +132,7 @@ var profilesCmd = &cobra.Command{ wg.Add(1) go func() { 
// load more information about profile - profile.Load(cmd.Context()) + profile.Load(cmd.Context(), skipValidate) wg.Done() }() profiles = append(profiles, profile) @@ -135,12 +141,7 @@ var profilesCmd = &cobra.Command{ return cmdio.Render(cmd.Context(), struct { Profiles []*profileMetadata `json:"profiles"` }{profiles}) - }, -} + } -var skipValidate bool - -func init() { - authCmd.AddCommand(profilesCmd) - profilesCmd.Flags().BoolVar(&skipValidate, "skip-validate", false, "Whether to skip validating the profiles") + return cmd } diff --git a/cmd/auth/token.go b/cmd/auth/token.go index 1b8d8b131..242a3dabe 100644 --- a/cmd/auth/token.go +++ b/cmd/auth/token.go @@ -9,15 +9,20 @@ import ( "github.com/spf13/cobra" ) -var tokenTimeout time.Duration +func newTokenCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { + cmd := &cobra.Command{ + Use: "token [HOST]", + Short: "Get authentication token", + } -var tokenCmd = &cobra.Command{ - Use: "token [HOST]", - Short: "Get authentication token", - RunE: func(cmd *cobra.Command, args []string) error { + var tokenTimeout time.Duration + cmd.Flags().DurationVar(&tokenTimeout, "timeout", auth.DefaultTimeout, + "Timeout for acquiring a token.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() if persistentAuth.Host == "" { - configureHost(ctx, args, 0) + configureHost(ctx, persistentAuth, args, 0) } defer persistentAuth.Close() @@ -33,11 +38,7 @@ var tokenCmd = &cobra.Command{ } cmd.OutOrStdout().Write(raw) return nil - }, -} + } -func init() { - authCmd.AddCommand(tokenCmd) - tokenCmd.Flags().DurationVar(&tokenTimeout, "timeout", auth.DefaultTimeout, - "Timeout for acquiring a token.") + return cmd } diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go new file mode 100644 index 000000000..8d1216f85 --- /dev/null +++ b/cmd/bundle/bundle.go @@ -0,0 +1,23 @@ +package bundle + +import ( + "github.com/spf13/cobra" +) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: 
"bundle", + Short: "Databricks Asset Bundles", + } + + initVariableFlag(cmd) + cmd.AddCommand(newDeployCommand()) + cmd.AddCommand(newDestroyCommand()) + cmd.AddCommand(newLaunchCommand()) + cmd.AddCommand(newRunCommand()) + cmd.AddCommand(newSchemaCommand()) + cmd.AddCommand(newSyncCommand()) + cmd.AddCommand(newTestCommand()) + cmd.AddCommand(newValidateCommand()) + return cmd +} diff --git a/cmd/bundle/debug/debug.go b/cmd/bundle/debug/debug.go deleted file mode 100644 index fdc894ef1..000000000 --- a/cmd/bundle/debug/debug.go +++ /dev/null @@ -1,19 +0,0 @@ -package debug - -import ( - "github.com/spf13/cobra" - - parent "github.com/databricks/cli/cmd/bundle" -) - -var debugCmd = &cobra.Command{ - Use: "debug", -} - -func AddCommand(cmd *cobra.Command) { - debugCmd.AddCommand(cmd) -} - -func init() { - parent.AddCommand(debugCmd) -} diff --git a/cmd/bundle/debug/whoami.go b/cmd/bundle/debug/whoami.go deleted file mode 100644 index 95d97eeb5..000000000 --- a/cmd/bundle/debug/whoami.go +++ /dev/null @@ -1,30 +0,0 @@ -package debug - -import ( - "fmt" - - "github.com/databricks/cli/bundle" - bundleCmd "github.com/databricks/cli/cmd/bundle" - "github.com/spf13/cobra" -) - -var whoamiCmd = &cobra.Command{ - Use: "whoami", - - PreRunE: bundleCmd.ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { - ctx := cmd.Context() - w := bundle.Get(ctx).WorkspaceClient() - user, err := w.CurrentUser.Me(ctx) - if err != nil { - return err - } - - fmt.Fprintln(cmd.OutOrStdout(), user.UserName) - return nil - }, -} - -func init() { - debugCmd.AddCommand(whoamiCmd) -} diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index e8c0d3958..a39f19969 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -6,12 +6,19 @@ import ( "github.com/spf13/cobra" ) -var deployCmd = &cobra.Command{ - Use: "deploy", - Short: "Deploy bundle", +func newDeployCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "deploy", + Short: "Deploy bundle", + 
PreRunE: ConfigureBundleWithVariables, + } - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + var forceDeploy bool + var computeID string + cmd.Flags().BoolVar(&forceDeploy, "force", false, "Force acquisition of deployment lock.") + cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) // If `--force` is specified, force acquisition of the deployment lock. @@ -23,14 +30,7 @@ var deployCmd = &cobra.Command{ phases.Build(), phases.Deploy(), )) - }, -} + } -var forceDeploy bool -var computeID string - -func init() { - AddCommand(deployCmd) - deployCmd.Flags().BoolVar(&forceDeploy, "force", false, "Force acquisition of deployment lock.") - deployCmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") + return cmd } diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index d0fe699a0..82d821441 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -12,12 +12,20 @@ import ( "golang.org/x/term" ) -var destroyCmd = &cobra.Command{ - Use: "destroy", - Short: "Destroy deployed bundle resources", +func newDestroyCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "destroy", + Short: "Destroy deployed bundle resources", - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + var autoApprove bool + var forceDestroy bool + cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals for deleting resources and files") + cmd.Flags().BoolVar(&forceDestroy, "force", false, "Force acquisition of deployment lock.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() b := bundle.Get(ctx) @@ -47,14 +55,7 @@ var destroyCmd = &cobra.Command{ 
phases.Build(), phases.Destroy(), )) - }, -} + } -var autoApprove bool -var forceDestroy bool - -func init() { - AddCommand(destroyCmd) - destroyCmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals for deleting resources and files") - destroyCmd.Flags().BoolVar(&forceDestroy, "force", false, "Force acquisition of deployment lock.") + return cmd } diff --git a/cmd/bundle/launch.go b/cmd/bundle/launch.go index ae44352e3..bbb43600a 100644 --- a/cmd/bundle/launch.go +++ b/cmd/bundle/launch.go @@ -7,17 +7,20 @@ import ( "github.com/spf13/cobra" ) -var launchCmd = &cobra.Command{ - Use: "launch", - Short: "Launches a notebook on development cluster", - Long: `Reads a file and executes it on dev cluster`, - Args: cobra.ExactArgs(1), +func newLaunchCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "launch", + Short: "Launches a notebook on development cluster", + Long: `Reads a file and executes it on dev cluster`, + Args: cobra.ExactArgs(1), - // We're not ready to expose this command until we specify its semantics. - Hidden: true, + // We're not ready to expose this command until we specify its semantics. + Hidden: true, - PreRunE: root.MustConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: root.MustConfigureBundle, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { return fmt.Errorf("TODO") // contents, err := os.ReadFile(args[0]) // if err != nil { @@ -29,9 +32,7 @@ var launchCmd = &cobra.Command{ // } // fmt.Fprintf(cmd.OutOrStdout(), "Success: %s", results.Text()) // return nil - }, -} + } -func init() { - AddCommand(launchCmd) + return cmd } diff --git a/cmd/bundle/root.go b/cmd/bundle/root.go deleted file mode 100644 index 395ed3837..000000000 --- a/cmd/bundle/root.go +++ /dev/null @@ -1,23 +0,0 @@ -package bundle - -import ( - "github.com/databricks/cli/cmd/root" - "github.com/spf13/cobra" -) - -// rootCmd represents the root command for the bundle subcommand. 
-var rootCmd = &cobra.Command{ - Use: "bundle", - Short: "Databricks Asset Bundles", -} - -func AddCommand(cmd *cobra.Command) { - rootCmd.AddCommand(cmd) -} - -var variables []string - -func init() { - root.RootCmd.AddCommand(rootCmd) - AddVariableFlag(rootCmd) -} diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 439e3522e..28b9ae7cd 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -13,16 +13,22 @@ import ( "github.com/spf13/cobra" ) -var runOptions run.Options -var noWait bool +func newRunCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "run [flags] KEY", + Short: "Run a workload (e.g. a job or a pipeline)", -var runCmd = &cobra.Command{ - Use: "run [flags] KEY", - Short: "Run a workload (e.g. a job or a pipeline)", + Args: cobra.ExactArgs(1), + PreRunE: ConfigureBundleWithVariables, + } - Args: cobra.ExactArgs(1), - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + var runOptions run.Options + runOptions.Define(cmd.Flags()) + + var noWait bool + cmd.Flags().BoolVar(&noWait, "no-wait", false, "Don't wait for the run to complete.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) err := bundle.Apply(cmd.Context(), b, bundle.Seq( @@ -65,9 +71,9 @@ var runCmd = &cobra.Command{ } } return nil - }, + } - ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { if len(args) > 0 { return nil, cobra.ShellCompDirectiveNoFileComp } @@ -86,11 +92,7 @@ var runCmd = &cobra.Command{ } return run.ResourceCompletions(b), cobra.ShellCompDirectiveNoFileComp - }, -} + } -func init() { - runOptions.Define(runCmd.Flags()) - rootCmd.AddCommand(runCmd) - runCmd.Flags().BoolVar(&noWait, "no-wait", false, "Don't wait for the run to complete.") + return cmd } diff --git a/cmd/bundle/schema.go 
b/cmd/bundle/schema.go index b288d78e9..8b2c0177b 100644 --- a/cmd/bundle/schema.go +++ b/cmd/bundle/schema.go @@ -9,11 +9,18 @@ import ( "github.com/spf13/cobra" ) -var schemaCmd = &cobra.Command{ - Use: "schema", - Short: "Generate JSON Schema for bundle configuration", +func newSchemaCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "schema", + Short: "Generate JSON Schema for bundle configuration", + } - RunE: func(cmd *cobra.Command, args []string) error { + var openapi string + var onlyDocs bool + cmd.Flags().StringVar(&openapi, "openapi", "", "path to a databricks openapi spec") + cmd.Flags().BoolVar(&onlyDocs, "only-docs", false, "only generate descriptions for the schema") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { docs, err := schema.BundleDocs(openapi) if err != nil { return err @@ -34,14 +41,7 @@ var schemaCmd = &cobra.Command{ } cmd.OutOrStdout().Write(result) return nil - }, -} + } -var openapi string -var onlyDocs bool - -func init() { - AddCommand(schemaCmd) - schemaCmd.Flags().StringVar(&openapi, "openapi", "", "path to a databricks openapi spec") - schemaCmd.Flags().BoolVar(&onlyDocs, "only-docs", false, "only generate descriptions for the schema") + return cmd } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 19adc2dd6..2fff7baf5 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -11,7 +11,13 @@ import ( "github.com/spf13/cobra" ) -func syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { +type syncFlags struct { + interval time.Duration + full bool + watch bool +} + +func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { cacheDir, err := b.CacheDir() if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) @@ -20,8 +26,8 @@ func syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOpti opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: 
b.Config.Workspace.FilesPath, - Full: full, - PollInterval: interval, + Full: f.full, + PollInterval: f.interval, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), @@ -29,13 +35,21 @@ func syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOpti return &opts, nil } -var syncCmd = &cobra.Command{ - Use: "sync [flags]", - Short: "Synchronize bundle tree to the workspace", - Args: cobra.NoArgs, +func newSyncCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "sync [flags]", + Short: "Synchronize bundle tree to the workspace", + Args: cobra.NoArgs, - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + var f syncFlags + cmd.Flags().DurationVar(&f.interval, "interval", 1*time.Second, "file system polling interval (for --watch)") + cmd.Flags().BoolVar(&f.full, "full", false, "perform full synchronization (default is incremental)") + cmd.Flags().BoolVar(&f.watch, "watch", false, "watch local file system for changes") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) // Run initialize phase to make sure paths are set. 
@@ -44,7 +58,7 @@ var syncCmd = &cobra.Command{ return err } - opts, err := syncOptionsFromBundle(cmd, b) + opts, err := f.syncOptionsFromBundle(cmd, b) if err != nil { return err } @@ -57,21 +71,12 @@ var syncCmd = &cobra.Command{ log.Infof(ctx, "Remote file sync location: %v", opts.RemotePath) - if watch { + if f.watch { return s.RunContinuous(ctx) } return s.RunOnce(ctx) - }, -} + } -var interval time.Duration -var full bool -var watch bool - -func init() { - AddCommand(syncCmd) - syncCmd.Flags().DurationVar(&interval, "interval", 1*time.Second, "file system polling interval (for --watch)") - syncCmd.Flags().BoolVar(&full, "full", false, "perform full synchronization (default is incremental)") - syncCmd.Flags().BoolVar(&watch, "watch", false, "watch local file system for changes") + return cmd } diff --git a/cmd/bundle/test.go b/cmd/bundle/test.go index ec36f18a1..ea1a4b716 100644 --- a/cmd/bundle/test.go +++ b/cmd/bundle/test.go @@ -7,16 +7,19 @@ import ( "github.com/spf13/cobra" ) -var testCmd = &cobra.Command{ - Use: "test", - Short: "run tests for the project", - Long: `This is longer description of the command`, +func newTestCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "test", + Short: "run tests for the project", + Long: `This is longer description of the command`, - // We're not ready to expose this command until we specify its semantics. - Hidden: true, + // We're not ready to expose this command until we specify its semantics. 
+ Hidden: true, - PreRunE: root.MustConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: root.MustConfigureBundle, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { return fmt.Errorf("TODO") // results := project.RunPythonOnDev(cmd.Context(), `return 1`) // if results.Failed() { @@ -24,9 +27,7 @@ var testCmd = &cobra.Command{ // } // fmt.Fprintf(cmd.OutOrStdout(), "Success: %s", results.Text()) // return nil - }, -} + } -func init() { - AddCommand(testCmd) + return cmd } diff --git a/cmd/bundle/validate.go b/cmd/bundle/validate.go index 65ab38905..b98cbd52d 100644 --- a/cmd/bundle/validate.go +++ b/cmd/bundle/validate.go @@ -8,12 +8,15 @@ import ( "github.com/spf13/cobra" ) -var validateCmd = &cobra.Command{ - Use: "validate", - Short: "Validate configuration", +func newValidateCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "validate", + Short: "Validate configuration", - PreRunE: ConfigureBundleWithVariables, - RunE: func(cmd *cobra.Command, args []string) error { + PreRunE: ConfigureBundleWithVariables, + } + + cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) err := bundle.Apply(cmd.Context(), b, phases.Initialize()) @@ -27,9 +30,7 @@ var validateCmd = &cobra.Command{ } cmd.OutOrStdout().Write(buf) return nil - }, -} + } -func init() { - AddCommand(validateCmd) + return cmd } diff --git a/cmd/bundle/variables.go b/cmd/bundle/variables.go index b1ab74fe5..33f557cc1 100644 --- a/cmd/bundle/variables.go +++ b/cmd/bundle/variables.go @@ -13,11 +13,16 @@ func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { return err } + variables, err := cmd.Flags().GetStringSlice("var") + if err != nil { + return err + } + // Initialize variables by assigning them values passed as command line flags b := bundle.Get(cmd.Context()) return b.Config.InitializeVariables(variables) } -func AddVariableFlag(cmd *cobra.Command) { - 
cmd.PersistentFlags().StringSliceVar(&variables, "var", []string{}, `set values for variables defined in bundle config. Example: --var="foo=bar"`) +func initVariableFlag(cmd *cobra.Command) { + cmd.PersistentFlags().StringSlice("var", []string{}, `set values for variables defined in bundle config. Example: --var="foo=bar"`) } diff --git a/cmd/cmd.go b/cmd/cmd.go index 69502d509..04d7cc804 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -1,40 +1,44 @@ package cmd import ( - "sync" - "github.com/databricks/cli/cmd/account" + "github.com/databricks/cli/cmd/api" + "github.com/databricks/cli/cmd/auth" + "github.com/databricks/cli/cmd/bundle" + "github.com/databricks/cli/cmd/configure" + "github.com/databricks/cli/cmd/fs" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd/sync" + "github.com/databricks/cli/cmd/version" "github.com/databricks/cli/cmd/workspace" "github.com/spf13/cobra" ) -var once sync.Once -var cmd *cobra.Command - func New() *cobra.Command { - // TODO: this command is still a global. - // Once the non-generated commands are all instantiatable, - // we can remove the global and instantiate this as well. - once.Do(func() { - cli := root.RootCmd + cli := root.New() - // Add account subcommand. - cli.AddCommand(account.New()) + // Add account subcommand. + cli.AddCommand(account.New()) - // Add workspace subcommands. - for _, cmd := range workspace.All() { - cli.AddCommand(cmd) - } + // Add workspace subcommands. + for _, cmd := range workspace.All() { + cli.AddCommand(cmd) + } - // Add workspace command groups. - groups := workspace.Groups() - for i := range groups { - cli.AddGroup(&groups[i]) - } + // Add workspace command groups. + groups := workspace.Groups() + for i := range groups { + cli.AddGroup(&groups[i]) + } - cmd = cli - }) + // Add other subcommands. 
+ cli.AddCommand(api.New()) + cli.AddCommand(auth.New()) + cli.AddCommand(bundle.New()) + cli.AddCommand(configure.New()) + cli.AddCommand(fs.New()) + cli.AddCommand(sync.New()) + cli.AddCommand(version.New()) - return cmd + return cli } diff --git a/cmd/configure/configure.go b/cmd/configure/configure.go index 14101d593..c51fd8300 100644 --- a/cmd/configure/configure.go +++ b/cmd/configure/configure.go @@ -5,7 +5,6 @@ import ( "fmt" "net/url" - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go/config" @@ -112,19 +111,30 @@ func configureNonInteractive(cmd *cobra.Command, ctx context.Context, cfg *confi return nil } -var configureCmd = &cobra.Command{ - Use: "configure", - Short: "Configure authentication", - Long: `Configure authentication. +func newConfigureCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "configure", + Short: "Configure authentication", + Long: `Configure authentication. -This command adds a profile to your ~/.databrickscfg file. -You can write to a different file by setting the DATABRICKS_CONFIG_FILE environment variable. + This command adds a profile to your ~/.databrickscfg file. + You can write to a different file by setting the DATABRICKS_CONFIG_FILE environment variable. -If this command is invoked in non-interactive mode, it will read the token from stdin. -The host must be specified with the --host flag. - `, - Hidden: true, - RunE: func(cmd *cobra.Command, args []string) error { + If this command is invoked in non-interactive mode, it will read the token from stdin. + The host must be specified with the --host flag. + `, + Hidden: true, + } + + cmd.Flags().String("host", "", "Databricks workspace host.") + cmd.Flags().String("profile", "DEFAULT", "Name for the connection profile to configure.") + + // Include token flag for compatibility with the legacy CLI. 
+ // It doesn't actually do anything because we always use PATs. + cmd.Flags().BoolP("token", "t", true, "Configure using Databricks Personal Access Token") + cmd.Flags().MarkHidden("token") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var cfg config.Config // Load environment variables, possibly the DEFAULT profile. @@ -152,16 +162,11 @@ The host must be specified with the --host flag. // Save profile to config file. return databrickscfg.SaveToProfile(ctx, &cfg) - }, + } + + return cmd } -func init() { - root.RootCmd.AddCommand(configureCmd) - configureCmd.Flags().String("host", "", "Databricks workspace host.") - configureCmd.Flags().String("profile", "DEFAULT", "Name for the connection profile to configure.") - - // Include token flag for compatibility with the legacy CLI. - // It doesn't actually do anything because we always use PATs. - configureCmd.Flags().BoolP("token", "t", true, "Configure using Databricks Personal Access Token") - configureCmd.Flags().MarkHidden("token") +func New() *cobra.Command { + return newConfigureCommand() } diff --git a/cmd/configure/configure_test.go b/cmd/configure/configure_test.go index 7b627ba98..e1ebe916b 100644 --- a/cmd/configure/configure_test.go +++ b/cmd/configure/configure_test.go @@ -1,4 +1,4 @@ -package configure +package configure_test import ( "context" @@ -7,7 +7,7 @@ import ( "runtime" "testing" - "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/cmd" "github.com/stretchr/testify/assert" "gopkg.in/ini.v1" ) @@ -54,9 +54,10 @@ func TestDefaultConfigureNoInteractive(t *testing.T) { }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) + cmd := cmd.New() + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) cfgPath := filepath.Join(tempHomeDir, ".databrickscfg") @@ -86,9 +87,10 @@ func 
TestConfigFileFromEnvNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) + cmd := cmd.New() + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) _, err = os.Stat(cfgPath) @@ -114,9 +116,10 @@ func TestCustomProfileConfigureNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - root.RootCmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"}) + cmd := cmd.New() + cmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"}) - err := root.RootCmd.ExecuteContext(ctx) + err := cmd.ExecuteContext(ctx) assert.NoError(t, err) _, err = os.Stat(cfgPath) diff --git a/cmd/fs/cat.go b/cmd/fs/cat.go index 2cdc40759..8227cd781 100644 --- a/cmd/fs/cat.go +++ b/cmd/fs/cat.go @@ -6,14 +6,16 @@ import ( "github.com/spf13/cobra" ) -var catCmd = &cobra.Command{ - Use: "cat FILE_PATH", - Short: "Show file content", - Long: `Show the contents of a file.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newCatCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "cat FILE_PATH", + Short: "Show file content", + Long: `Show the contents of a file.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -26,9 +28,7 @@ var catCmd = &cobra.Command{ return err } return cmdio.RenderReader(ctx, r) - }, -} + } -func init() { - fsCmd.AddCommand(catCmd) + return cmd } diff --git a/cmd/fs/cp.go b/cmd/fs/cp.go index 204d6c33c..294d2daba 100644 --- a/cmd/fs/cp.go +++ b/cmd/fs/cp.go @@ -15,6 +15,9 @@ import ( ) type copy struct { + overwrite 
bool + recursive bool + ctx context.Context sourceFiler filer.Filer targetFiler filer.Filer @@ -48,7 +51,7 @@ func (c *copy) cpWriteCallback(sourceDir, targetDir string) fs.WalkDirFunc { } func (c *copy) cpDirToDir(sourceDir, targetDir string) error { - if !cpRecursive { + if !c.recursive { return fmt.Errorf("source path %s is a directory. Please specify the --recursive flag", sourceDir) } @@ -71,7 +74,7 @@ func (c *copy) cpFileToFile(sourcePath, targetPath string) error { } defer r.Close() - if cpOverwrite { + if c.overwrite { err = c.targetFiler.Write(c.ctx, targetPath, r, filer.OverwriteIfExists) if err != nil { return err @@ -123,28 +126,30 @@ func (c *copy) emitFileCopiedEvent(sourcePath, targetPath string) error { return cmdio.RenderWithTemplate(c.ctx, event, template) } -var cpOverwrite bool -var cpRecursive bool +func newCpCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "cp SOURCE_PATH TARGET_PATH", + Short: "Copy files and directories to and from DBFS.", + Long: `Copy files to and from DBFS. -// cpCmd represents the fs cp command -var cpCmd = &cobra.Command{ - Use: "cp SOURCE_PATH TARGET_PATH", - Short: "Copy files and directories to and from DBFS.", - Long: `Copy files to and from DBFS. + For paths in DBFS it is required that you specify the "dbfs" scheme. + For example: dbfs:/foo/bar. - For paths in DBFS it is required that you specify the "dbfs" scheme. - For example: dbfs:/foo/bar. + Recursively copying a directory will copy all files inside directory + at SOURCE_PATH to the directory at TARGET_PATH. - Recursively copying a directory will copy all files inside directory - at SOURCE_PATH to the directory at TARGET_PATH. + When copying a file, if TARGET_PATH is a directory, the file will be created + inside the directory, otherwise the file is created at TARGET_PATH. 
+ `, + Args: cobra.ExactArgs(2), + PreRunE: root.MustWorkspaceClient, + } - When copying a file, if TARGET_PATH is a directory, the file will be created - inside the directory, otherwise the file is created at TARGET_PATH. -`, - Args: cobra.ExactArgs(2), - PreRunE: root.MustWorkspaceClient, + var c copy + cmd.Flags().BoolVar(&c.overwrite, "overwrite", false, "overwrite existing files") + cmd.Flags().BoolVarP(&c.recursive, "recursive", "r", false, "recursively copy files from directory") - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() // TODO: Error if a user uses '\' as path separator on windows when "file" @@ -164,22 +169,18 @@ var cpCmd = &cobra.Command{ return err } - sourceScheme := "" + c.sourceScheme = "" if isDbfsPath(fullSourcePath) { - sourceScheme = "dbfs" + c.sourceScheme = "dbfs" } - targetScheme := "" + c.targetScheme = "" if isDbfsPath(fullTargetPath) { - targetScheme = "dbfs" + c.targetScheme = "dbfs" } - c := copy{ - ctx: ctx, - sourceFiler: sourceFiler, - targetFiler: targetFiler, - sourceScheme: sourceScheme, - targetScheme: targetScheme, - } + c.ctx = ctx + c.sourceFiler = sourceFiler + c.targetFiler = targetFiler // Get information about file at source path sourceInfo, err := sourceFiler.Stat(ctx, sourcePath) @@ -200,11 +201,7 @@ var cpCmd = &cobra.Command{ // case 3: source path is a file, and target path is a file return c.cpFileToFile(sourcePath, targetPath) - }, -} + } -func init() { - cpCmd.Flags().BoolVar(&cpOverwrite, "overwrite", false, "overwrite existing files") - cpCmd.Flags().BoolVarP(&cpRecursive, "recursive", "r", false, "recursively copy files from directory") - fsCmd.AddCommand(cpCmd) + return cmd } diff --git a/cmd/fs/fs.go b/cmd/fs/fs.go index a69c4b62d..190220f4a 100644 --- a/cmd/fs/fs.go +++ b/cmd/fs/fs.go @@ -1,17 +1,23 @@ package fs import ( - "github.com/databricks/cli/cmd/root" "github.com/spf13/cobra" ) -// fsCmd represents the fs 
command -var fsCmd = &cobra.Command{ - Use: "fs", - Short: "Filesystem related commands", - Long: `Commands to do DBFS operations.`, -} +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "fs", + Short: "Filesystem related commands", + Long: `Commands to do DBFS operations.`, + } -func init() { - root.RootCmd.AddCommand(fsCmd) + cmd.AddCommand( + newCatCommand(), + newCpCommand(), + newLsCommand(), + newMkdirCommand(), + newRmCommand(), + ) + + return cmd } diff --git a/cmd/fs/ls.go b/cmd/fs/ls.go index b06345d50..7ae55e1f4 100644 --- a/cmd/fs/ls.go +++ b/cmd/fs/ls.go @@ -37,15 +37,21 @@ func toJsonDirEntry(f fs.DirEntry, baseDir string, isAbsolute bool) (*jsonDirEnt }, nil } -// lsCmd represents the ls command -var lsCmd = &cobra.Command{ - Use: "ls DIR_PATH", - Short: "Lists files", - Long: `Lists files`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newLsCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "ls DIR_PATH", + Short: "Lists files", + Long: `Lists files`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + var long bool + var absolute bool + cmd.Flags().BoolVarP(&long, "long", "l", false, "Displays full information including size, file type and modification time since Epoch in milliseconds.") + cmd.Flags().BoolVar(&absolute, "absolute", false, "Displays absolute paths.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -60,7 +66,7 @@ var lsCmd = &cobra.Command{ jsonDirEntries := make([]jsonDirEntry, len(entries)) for i, entry := range entries { - jsonDirEntry, err := toJsonDirEntry(entry, args[0], lsAbsolute) + jsonDirEntry, err := toJsonDirEntry(entry, args[0], absolute) if err != nil { return err } @@ -71,7 +77,7 @@ var lsCmd = &cobra.Command{ }) // Use template for long mode if the flag is set - if longMode { + if long { return 
cmdio.RenderWithTemplate(ctx, jsonDirEntries, cmdio.Heredoc(` {{range .}}{{if .IsDir}}DIRECTORY {{else}}FILE {{end}}{{.Size}} {{.ModTime|pretty_date}} {{.Name}} {{end}} @@ -81,14 +87,7 @@ var lsCmd = &cobra.Command{ {{range .}}{{.Name}} {{end}} `)) - }, -} + } -var longMode bool -var lsAbsolute bool - -func init() { - lsCmd.Flags().BoolVarP(&longMode, "long", "l", false, "Displays full information including size, file type and modification time since Epoch in milliseconds.") - lsCmd.Flags().BoolVar(&lsAbsolute, "absolute", false, "Displays absolute paths.") - fsCmd.AddCommand(lsCmd) + return cmd } diff --git a/cmd/fs/mkdir.go b/cmd/fs/mkdir.go index cb0491393..c6a5e607c 100644 --- a/cmd/fs/mkdir.go +++ b/cmd/fs/mkdir.go @@ -5,17 +5,19 @@ import ( "github.com/spf13/cobra" ) -var mkdirCmd = &cobra.Command{ - Use: "mkdir DIR_PATH", - // Alias `mkdirs` for this command exists for legacy purposes. This command - // is called databricks fs mkdirs in our legacy CLI: https://github.com/databricks/databricks-cli - Aliases: []string{"mkdirs"}, - Short: "Make directories", - Long: `Mkdir will create directories along the path to the argument directory.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newMkdirCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "mkdir DIR_PATH", + // Alias `mkdirs` for this command exists for legacy purposes. 
This command + // is called databricks fs mkdirs in our legacy CLI: https://github.com/databricks/databricks-cli + Aliases: []string{"mkdirs"}, + Short: "Make directories", + Long: `Mkdir will create directories along the path to the argument directory.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -24,9 +26,7 @@ var mkdirCmd = &cobra.Command{ } return f.Mkdir(ctx, path) - }, -} + } -func init() { - fsCmd.AddCommand(mkdirCmd) + return cmd } diff --git a/cmd/fs/rm.go b/cmd/fs/rm.go index 21f5adb99..3ce8d3b93 100644 --- a/cmd/fs/rm.go +++ b/cmd/fs/rm.go @@ -6,14 +6,19 @@ import ( "github.com/spf13/cobra" ) -var rmCmd = &cobra.Command{ - Use: "rm PATH", - Short: "Remove files and directories from dbfs.", - Long: `Remove files and directories from dbfs.`, - Args: cobra.ExactArgs(1), - PreRunE: root.MustWorkspaceClient, +func newRmCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "rm PATH", + Short: "Remove files and directories from dbfs.", + Long: `Remove files and directories from dbfs.`, + Args: cobra.ExactArgs(1), + PreRunE: root.MustWorkspaceClient, + } - RunE: func(cmd *cobra.Command, args []string) error { + var recursive bool + cmd.Flags().BoolVarP(&recursive, "recursive", "r", false, "Recursively delete a non-empty directory.") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() f, path, err := filerForPath(ctx, args[0]) @@ -25,12 +30,7 @@ var rmCmd = &cobra.Command{ return f.Delete(ctx, path, filer.DeleteRecursively) } return f.Delete(ctx, path) - }, -} + } -var recursive bool - -func init() { - rmCmd.Flags().BoolVarP(&recursive, "recursive", "r", false, "Recursively delete a non-empty directory.") - fsCmd.AddCommand(rmCmd) + return cmd } diff --git a/cmd/root/root.go b/cmd/root/root.go index 45fc27f2c..0a18594a1 
100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -115,6 +115,3 @@ func Execute(cmd *cobra.Command) { os.Exit(1) } } - -// Keep a global copy until all commands can be initialized. -var RootCmd = New() diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index 51d71ea2f..d2aad0c3f 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -17,7 +17,15 @@ import ( "github.com/spf13/cobra" ) -func syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) (*sync.SyncOptions, error) { +type syncFlags struct { + // project files polling interval + interval time.Duration + full bool + watch bool + output flags.Output +} + +func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) (*sync.SyncOptions, error) { if len(args) > 0 { return nil, fmt.Errorf("SRC and DST are not configurable in the context of a bundle") } @@ -30,8 +38,8 @@ func syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, - Full: full, - PollInterval: interval, + Full: f.full, + PollInterval: f.interval, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), @@ -39,7 +47,7 @@ func syncOptionsFromBundle(cmd *cobra.Command, args []string, b *bundle.Bundle) return &opts, nil } -func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, error) { +func (f *syncFlags) syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, error) { if len(args) != 2 { return nil, flag.ErrHelp } @@ -47,8 +55,8 @@ func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, opts := sync.SyncOptions{ LocalPath: args[0], RemotePath: args[1], - Full: full, - PollInterval: interval, + Full: f.full, + PollInterval: f.interval, // We keep existing behavior for VS Code extension where if there is // no bundle defined, we store the snapshots in `.databricks`. 
@@ -60,13 +68,22 @@ func syncOptionsFromArgs(cmd *cobra.Command, args []string) (*sync.SyncOptions, return &opts, nil } -var syncCmd = &cobra.Command{ - Use: "sync [flags] SRC DST", - Short: "Synchronize a local directory to a workspace directory", - Args: cobra.MaximumNArgs(2), +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "sync [flags] SRC DST", + Short: "Synchronize a local directory to a workspace directory", + Args: cobra.MaximumNArgs(2), + } - // PreRunE: root.TryConfigureBundle, - RunE: func(cmd *cobra.Command, args []string) error { + f := syncFlags{ + output: flags.OutputText, + } + cmd.Flags().DurationVar(&f.interval, "interval", 1*time.Second, "file system polling interval (for --watch)") + cmd.Flags().BoolVar(&f.full, "full", false, "perform full synchronization (default is incremental)") + cmd.Flags().BoolVar(&f.watch, "watch", false, "watch local file system for changes") + cmd.Flags().Var(&f.output, "output", "type of output format") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { var opts *sync.SyncOptions var err error @@ -84,7 +101,7 @@ var syncCmd = &cobra.Command{ // } // opts, err = syncOptionsFromBundle(cmd, args, b) // } else { - opts, err = syncOptionsFromArgs(cmd, args) + opts, err = f.syncOptionsFromArgs(cmd, args) // } if err != nil { return err @@ -97,7 +114,7 @@ var syncCmd = &cobra.Command{ } var outputFunc func(context.Context, <-chan sync.Event, io.Writer) - switch output { + switch f.output { case flags.OutputText: outputFunc = textOutput case flags.OutputJSON: @@ -113,7 +130,7 @@ var syncCmd = &cobra.Command{ }() } - if watch { + if f.watch { err = s.RunContinuous(ctx) } else { err = s.RunOnce(ctx) @@ -122,9 +139,9 @@ var syncCmd = &cobra.Command{ s.Close() wg.Wait() return err - }, + } - ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, 
cobra.ShellCompDirective) { err := root.TryConfigureBundle(cmd, args) if err != nil { return nil, cobra.ShellCompDirectiveError @@ -149,19 +166,7 @@ var syncCmd = &cobra.Command{ default: return nil, cobra.ShellCompDirectiveNoFileComp } - }, -} + } -// project files polling interval -var interval time.Duration -var full bool -var watch bool -var output flags.Output = flags.OutputText - -func init() { - root.RootCmd.AddCommand(syncCmd) - syncCmd.Flags().DurationVar(&interval, "interval", 1*time.Second, "file system polling interval (for --watch)") - syncCmd.Flags().BoolVar(&full, "full", false, "perform full synchronization (default is incremental)") - syncCmd.Flags().BoolVar(&watch, "watch", false, "watch local file system for changes") - syncCmd.Flags().Var(&output, "output", "type of output format") + return cmd } diff --git a/cmd/sync/sync_test.go b/cmd/sync/sync_test.go index 2d8c8b113..a6eedbe6e 100644 --- a/cmd/sync/sync_test.go +++ b/cmd/sync/sync_test.go @@ -27,7 +27,8 @@ func TestSyncOptionsFromBundle(t *testing.T) { }, } - opts, err := syncOptionsFromBundle(syncCmd, []string{}, b) + f := syncFlags{} + opts, err := f.syncOptionsFromBundle(New(), []string{}, b) require.NoError(t, err) assert.Equal(t, tempDir, opts.LocalPath) assert.Equal(t, "/Users/jane@doe.com/path", opts.RemotePath) @@ -37,16 +38,18 @@ func TestSyncOptionsFromBundle(t *testing.T) { func TestSyncOptionsFromArgsRequiredTwoArgs(t *testing.T) { var err error - _, err = syncOptionsFromArgs(syncCmd, []string{}) + f := syncFlags{} + _, err = f.syncOptionsFromArgs(New(), []string{}) require.ErrorIs(t, err, flag.ErrHelp) - _, err = syncOptionsFromArgs(syncCmd, []string{"foo"}) + _, err = f.syncOptionsFromArgs(New(), []string{"foo"}) require.ErrorIs(t, err, flag.ErrHelp) - _, err = syncOptionsFromArgs(syncCmd, []string{"foo", "bar", "qux"}) + _, err = f.syncOptionsFromArgs(New(), []string{"foo", "bar", "qux"}) require.ErrorIs(t, err, flag.ErrHelp) } func TestSyncOptionsFromArgs(t *testing.T) { - 
opts, err := syncOptionsFromArgs(syncCmd, []string{"/local", "/remote"}) + f := syncFlags{} + opts, err := f.syncOptionsFromArgs(New(), []string{"/local", "/remote"}) require.NoError(t, err) assert.Equal(t, "/local", opts.LocalPath) assert.Equal(t, "/remote", opts.RemotePath) diff --git a/cmd/version/version.go b/cmd/version/version.go index 1f772424f..17bb4b9af 100644 --- a/cmd/version/version.go +++ b/cmd/version/version.go @@ -1,25 +1,24 @@ package version import ( - "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" "github.com/spf13/cobra" ) -var versionCmd = &cobra.Command{ - Use: "version", - Args: cobra.NoArgs, +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "version", + Args: cobra.NoArgs, - Annotations: map[string]string{ - "template": "Databricks CLI v{{.Version}}\n", - }, + Annotations: map[string]string{ + "template": "Databricks CLI v{{.Version}}\n", + }, + } - RunE: func(cmd *cobra.Command, args []string) error { + cmd.RunE = func(cmd *cobra.Command, args []string) error { return cmdio.Render(cmd.Context(), build.GetInfo()) - }, -} + } -func init() { - root.RootCmd.AddCommand(versionCmd) + return cmd } diff --git a/internal/secrets_test.go b/internal/secrets_test.go index 1e9c86abf..b030071bb 100644 --- a/internal/secrets_test.go +++ b/internal/secrets_test.go @@ -77,13 +77,6 @@ func TestSecretsPutSecretStringValue(tt *testing.T) { func TestSecretsPutSecretBytesValue(tt *testing.T) { ctx, t := acc.WorkspaceTest(tt) - - if true { - // Uncomment below to run this test in isolation. - // To be addressed once none of the commands taint global state. 
- t.Skip("skipping because the test above clobbers global state") - } - scope := temporarySecretScope(ctx, t) key := "test-key" value := []byte{0x00, 0x01, 0x02, 0x03} diff --git a/main.go b/main.go index 414e42d0b..a4b8aabd6 100644 --- a/main.go +++ b/main.go @@ -2,17 +2,7 @@ package main import ( "github.com/databricks/cli/cmd" - _ "github.com/databricks/cli/cmd/account" - _ "github.com/databricks/cli/cmd/api" - _ "github.com/databricks/cli/cmd/auth" - _ "github.com/databricks/cli/cmd/bundle" - _ "github.com/databricks/cli/cmd/bundle/debug" - _ "github.com/databricks/cli/cmd/configure" - _ "github.com/databricks/cli/cmd/fs" "github.com/databricks/cli/cmd/root" - _ "github.com/databricks/cli/cmd/sync" - _ "github.com/databricks/cli/cmd/version" - _ "github.com/databricks/cli/cmd/workspace" ) func main() { From e3a181b03d96c7e86e9391c2733cbbd039369641 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 27 Jul 2023 12:47:38 +0200 Subject: [PATCH 026/139] Update CHANGELOG template (#588) Made the recent release reflect this template. --------- Co-authored-by: Miles Yucht --- .codegen/changelog.md.tmpl | 23 +++++++++++++++++------ CHANGELOG.md | 24 +++++++++++++----------- 2 files changed, 30 insertions(+), 17 deletions(-) diff --git a/.codegen/changelog.md.tmpl b/.codegen/changelog.md.tmpl index 83f1b7712..018fb1cb7 100644 --- a/.codegen/changelog.md.tmpl +++ b/.codegen/changelog.md.tmpl @@ -2,18 +2,29 @@ ## {{.Version}} -{{range .Changes -}} +CLI: +{{- range .Changes}} * {{.}}. 
-{{end}}{{- if .ApiChanges}} +{{- end}} + +Bundles: + * **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST** + +Internal: + * **FILL THIS IN MANUALLY BY MOVING RELEVANT ITEMS FROM ABOVE LIST** + +{{ if .ApiChanges -}} API Changes: -{{range .ApiChanges}}{{if or (eq .X "method") (eq .X "service")}} +{{- range .ApiChanges}}{{if or (eq .X "method") (eq .X "service")}} * {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}. {{- end}}{{- end}} -OpenAPI SHA: {{.Sha}}, Date: {{.Changed}} -{{- end}}{{if .DependencyUpdates}} +OpenAPI commit {{.Sha}} ({{.Changed}}) +{{- end }} + +{{- if .DependencyUpdates }} Dependency updates: -{{range .DependencyUpdates}} +{{- range .DependencyUpdates}} * {{.}}. {{- end -}} {{end}} diff --git a/CHANGELOG.md b/CHANGELOG.md index 8c045ef0b..c990af50a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,17 +2,19 @@ ## 0.201.0 -* Add development runs ([#522](https://github.com/databricks/cli/pull/522)). -* Support tab completion for profiles ([#572](https://github.com/databricks/cli/pull/572)). -* Correctly use --profile flag passed for all bundle commands ([#571](https://github.com/databricks/cli/pull/571)). -* Disallow notebooks in paths where files are expected ([#573](https://github.com/databricks/cli/pull/573)). -* Improve auth login experience ([#570](https://github.com/databricks/cli/pull/570)). -* Remove base path checks during sync ([#576](https://github.com/databricks/cli/pull/576)). -* First look for databricks.yml before falling back to bundle.yml ([#580](https://github.com/databricks/cli/pull/580)). -* Integrate with auto-release infra ([#581](https://github.com/databricks/cli/pull/581)). +CLI: + * Support tab completion for profiles ([#572](https://github.com/databricks/cli/pull/572)). + * Improve auth login experience ([#570](https://github.com/databricks/cli/pull/570)). + * Integrate with auto-release infra ([#581](https://github.com/databricks/cli/pull/581)). 
+ +Bundles: + * Add development runs ([#522](https://github.com/databricks/cli/pull/522)). + * Correctly use --profile flag passed for all bundle commands ([#571](https://github.com/databricks/cli/pull/571)). + * Disallow notebooks in paths where files are expected ([#573](https://github.com/databricks/cli/pull/573)). + * Remove base path checks during sync ([#576](https://github.com/databricks/cli/pull/576)). + * First look for databricks.yml before falling back to bundle.yml ([#580](https://github.com/databricks/cli/pull/580)). API Changes: - * Removed `databricks metastores maintenance` command. * Added `databricks metastores enable-optimization` command. * Added `databricks tables update` command. @@ -20,9 +22,9 @@ API Changes: * Changed `databricks account settings read-personal-compute-setting` command with new required argument order. * Added `databricks clean-rooms` command group. -OpenAPI SHA: 850a075ed9758d21a6bc4409506b48c8b9f93ab4, Date: 2023-07-18 -Dependency updates: +OpenAPI commit 850a075ed9758d21a6bc4409506b48c8b9f93ab4 (2023-07-18) +Dependency updates: * Bump golang.org/x/term from 0.9.0 to 0.10.0 ([#567](https://github.com/databricks/cli/pull/567)). * Bump golang.org/x/oauth2 from 0.9.0 to 0.10.0 ([#566](https://github.com/databricks/cli/pull/566)). * Bump golang.org/x/mod from 0.11.0 to 0.12.0 ([#568](https://github.com/databricks/cli/pull/568)). From 3697dfcb51781b69c1dd770773d39f4e8516961c Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Thu, 27 Jul 2023 15:23:55 +0200 Subject: [PATCH 027/139] Release v0.202.0 (#619) Breaking Change: * Require include glob patterns to be explicitly defined ([#602](https://github.com/databricks/cli/pull/602)). Bundles: * Add support for more SDK config options ([#587](https://github.com/databricks/cli/pull/587)). * Add template renderer for Databricks templates ([#589](https://github.com/databricks/cli/pull/589)). * Fix formatting in renderer.go ([#593](https://github.com/databricks/cli/pull/593)). 
* Fixed python wheel test ([#608](https://github.com/databricks/cli/pull/608)). * Auto detect Python wheel packages and infer build command ([#603](https://github.com/databricks/cli/pull/603)). * Added support for artifacts building for bundles ([#583](https://github.com/databricks/cli/pull/583)). * Add support for cloning repositories ([#544](https://github.com/databricks/cli/pull/544)). * Add regexp compile helper function for templates ([#601](https://github.com/databricks/cli/pull/601)). * Add unit test that raw strings are printed as is ([#599](https://github.com/databricks/cli/pull/599)). Internal: * Fix tests under ./cmd/configure if DATABRICKS_TOKEN is set ([#605](https://github.com/databricks/cli/pull/605)). * Remove dependency on global state in generated commands ([#595](https://github.com/databricks/cli/pull/595)). * Remove dependency on global state for the root command ([#606](https://github.com/databricks/cli/pull/606)). * Add merge_group trigger for build ([#612](https://github.com/databricks/cli/pull/612)). * Added support for build command chaining and error on missing wheel ([#607](https://github.com/databricks/cli/pull/607)). * Add TestAcc prefix to filer test and fix any failing tests ([#611](https://github.com/databricks/cli/pull/611)). * Add url parse helper function for templates ([#600](https://github.com/databricks/cli/pull/600)). * Remove dependency on global state for remaining commands ([#613](https://github.com/databricks/cli/pull/613)). * Update CHANGELOG template ([#588](https://github.com/databricks/cli/pull/588)). 
--- .codegen/_openapi_sha | 2 +- CHANGELOG.md | 29 ++ cmd/account/budgets/budgets.go | 44 ++- cmd/account/credentials/credentials.go | 44 ++- cmd/account/groups/groups.go | 93 +++++-- .../ip-access-lists/ip-access-lists.go | 44 ++- cmd/account/log-delivery/log-delivery.go | 22 +- cmd/account/networks/networks.go | 46 ++- cmd/account/private-access/private-access.go | 46 ++- .../service-principals/service-principals.go | 93 +++++-- cmd/account/storage/storage.go | 44 ++- cmd/account/users/users.go | 93 +++++-- cmd/account/vpc-endpoints/vpc-endpoints.go | 46 ++- cmd/account/workspaces/workspaces.go | 66 ++++- cmd/workspace/alerts/alerts.go | 44 ++- .../cluster-policies/cluster-policies.go | 49 +++- cmd/workspace/clusters/clusters.go | 223 ++++++++++----- cmd/workspace/connections/connections.go | 44 ++- cmd/workspace/dashboards/dashboards.go | 66 ++++- cmd/workspace/functions/functions.go | 66 ++++- .../git-credentials/git-credentials.go | 66 ++++- .../global-init-scripts.go | 46 ++- cmd/workspace/groups/groups.go | 93 +++++-- .../instance-pools/instance-pools.go | 49 +++- .../ip-access-lists/ip-access-lists.go | 44 ++- cmd/workspace/jobs/jobs.go | 263 +++++++++++++----- cmd/workspace/metastores/metastores.go | 88 ++++-- cmd/workspace/pipelines/pipelines.go | 179 +++++++++--- cmd/workspace/providers/providers.go | 91 ++++-- cmd/workspace/queries/queries.go | 88 ++++-- cmd/workspace/recipients/recipients.go | 91 ++++-- cmd/workspace/repos/repos.go | 66 ++++- cmd/workspace/schemas/schemas.go | 68 ++++- .../service-principals/service-principals.go | 93 +++++-- .../storage-credentials.go | 71 +++-- cmd/workspace/tables/tables.go | 90 ++++-- .../token-management/token-management.go | 44 ++- cmd/workspace/tokens/tokens.go | 27 +- cmd/workspace/users/users.go | 93 +++++-- cmd/workspace/volumes/volumes.go | 66 ++++- cmd/workspace/warehouses/warehouses.go | 110 ++++++-- cmd/workspace/workspace/workspace.go | 74 +++-- 42 files changed, 2343 insertions(+), 761 deletions(-) diff 
--git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index ae66d336a..1079283dd 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -850a075ed9758d21a6bc4409506b48c8b9f93ab4 \ No newline at end of file +0a1949ba96f71680dad30e06973eaae85b1307bb \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index c990af50a..f0b1f6968 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,34 @@ # Version changelog +## 0.202.0 + +Breaking Change: + * Require include glob patterns to be explicitly defined ([#602](https://github.com/databricks/cli/pull/602)). + +Bundles: + * Add support for more SDK config options ([#587](https://github.com/databricks/cli/pull/587)). + * Add template renderer for Databricks templates ([#589](https://github.com/databricks/cli/pull/589)). + * Fix formatting in renderer.go ([#593](https://github.com/databricks/cli/pull/593)). + * Fixed python wheel test ([#608](https://github.com/databricks/cli/pull/608)). + * Auto detect Python wheel packages and infer build command ([#603](https://github.com/databricks/cli/pull/603)). + * Added support for artifacts building for bundles ([#583](https://github.com/databricks/cli/pull/583)). + * Add support for cloning repositories ([#544](https://github.com/databricks/cli/pull/544)). + * Add regexp compile helper function for templates ([#601](https://github.com/databricks/cli/pull/601)). + * Add unit test that raw strings are printed as is ([#599](https://github.com/databricks/cli/pull/599)). + +Internal: + * Fix tests under ./cmd/configure if DATABRICKS_TOKEN is set ([#605](https://github.com/databricks/cli/pull/605)). + * Remove dependency on global state in generated commands ([#595](https://github.com/databricks/cli/pull/595)). + * Remove dependency on global state for the root command ([#606](https://github.com/databricks/cli/pull/606)). + * Add merge_group trigger for build ([#612](https://github.com/databricks/cli/pull/612)). 
+ * Added support for build command chaining and error on missing wheel ([#607](https://github.com/databricks/cli/pull/607)). + * Add TestAcc prefix to filer test and fix any failing tests ([#611](https://github.com/databricks/cli/pull/611)). + * Add url parse helper function for templates ([#600](https://github.com/databricks/cli/pull/600)). + * Remove dependency on global state for remaining commands ([#613](https://github.com/databricks/cli/pull/613)). + * Update CHANGELOG template ([#588](https://github.com/databricks/cli/pull/588)). + + + ## 0.201.0 CLI: diff --git a/cmd/account/budgets/budgets.go b/cmd/account/budgets/budgets.go index ed8b4591a..1a0c7a0a9 100755 --- a/cmd/account/budgets/budgets.go +++ b/cmd/account/budgets/budgets.go @@ -128,16 +128,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down." + names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Budget ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have budget id") + } deleteReq.BudgetId = args[0] err = a.Budgets.Delete(ctx, deleteReq) @@ -190,16 +202,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No BUDGET_ID argument specified. Loading names for Budgets drop-down." + names, err := a.Budgets.BudgetWithStatusNameToBudgetIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Budgets drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Budget ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have budget id") + } getReq.BudgetId = args[0] response, err := a.Budgets.Get(ctx, getReq) diff --git a/cmd/account/credentials/credentials.go b/cmd/account/credentials/credentials.go index 35c8869a8..99204bfbd 100755 --- a/cmd/account/credentials/credentials.go +++ b/cmd/account/credentials/credentials.go @@ -143,16 +143,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CREDENTIALS_ID argument specified. Loading names for Credentials drop-down." + names, err := a.Credentials.CredentialCredentialsNameToCredentialsIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API credential configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api credential configuration id") + } deleteReq.CredentialsId = args[0] err = a.Credentials.Delete(ctx, deleteReq) @@ -205,16 +217,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CREDENTIALS_ID argument specified. Loading names for Credentials drop-down." + names, err := a.Credentials.CredentialCredentialsNameToCredentialsIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API credential configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api credential configuration id") + } getReq.CredentialsId = args[0] response, err := a.Credentials.Get(ctx, getReq) diff --git a/cmd/account/groups/groups.go b/cmd/account/groups/groups.go index 09594fa3a..04298b49c 100755 --- a/cmd/account/groups/groups.go +++ b/cmd/account/groups/groups.go @@ -3,6 +3,8 @@ package groups import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -145,16 +147,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." + names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks account") + } deleteReq.Id = args[0] err = a.Groups.Delete(ctx, deleteReq) @@ -206,16 +220,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." + names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks account") + } getReq.Id = args[0] response, err := a.Groups.Get(ctx, getReq) @@ -352,11 +378,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -368,6 +389,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." + names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks account") + } patchReq.Id = args[0] err = a.Groups.Patch(ctx, patchReq) @@ -430,14 +468,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -449,6 +479,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Groups drop-down." + names, err := a.Groups.GroupDisplayNameToIdMap(ctx, iam.ListAccountGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks group ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks group id") + } updateReq.Id = args[0] } diff --git a/cmd/account/ip-access-lists/ip-access-lists.go b/cmd/account/ip-access-lists/ip-access-lists.go index 980dc7776..328883ac3 100755 --- a/cmd/account/ip-access-lists/ip-access-lists.go +++ b/cmd/account/ip-access-lists/ip-access-lists.go @@ -158,16 +158,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Account Ip Access Lists drop-down." + names, err := a.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding ip access list") + } deleteReq.IpAccessListId = args[0] err = a.IpAccessLists.Delete(ctx, deleteReq) @@ -219,16 +231,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Account Ip Access Lists drop-down." + names, err := a.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding ip access list") + } getReq.IpAccessListId = args[0] response, err := a.IpAccessLists.Get(ctx, getReq) diff --git a/cmd/account/log-delivery/log-delivery.go b/cmd/account/log-delivery/log-delivery.go index 2018932ee..a7ed39dca 100755 --- a/cmd/account/log-delivery/log-delivery.go +++ b/cmd/account/log-delivery/log-delivery.go @@ -218,16 +218,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No LOG_DELIVERY_CONFIGURATION_ID argument specified. Loading names for Log Delivery drop-down." + names, err := a.LogDelivery.LogDeliveryConfigurationConfigNameToConfigIdMap(ctx, billing.ListLogDeliveryRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Log Delivery drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks log delivery configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks log delivery configuration id") + } getReq.LogDeliveryConfigurationId = args[0] response, err := a.LogDelivery.Get(ctx, getReq) diff --git a/cmd/account/networks/networks.go b/cmd/account/networks/networks.go index 36867cf25..f481ffdbd 100755 --- a/cmd/account/networks/networks.go +++ b/cmd/account/networks/networks.go @@ -3,6 +3,8 @@ package networks import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -144,16 +146,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NETWORK_ID argument specified. Loading names for Networks drop-down." + names, err := a.Networks.NetworkNetworkNameToNetworkIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Networks drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API network configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api network configuration id") + } deleteReq.NetworkId = args[0] err = a.Networks.Delete(ctx, deleteReq) @@ -206,16 +220,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NETWORK_ID argument specified. Loading names for Networks drop-down." + names, err := a.Networks.NetworkNetworkNameToNetworkIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Networks drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API network configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api network configuration id") + } getReq.NetworkId = args[0] response, err := a.Networks.Get(ctx, getReq) diff --git a/cmd/account/private-access/private-access.go b/cmd/account/private-access/private-access.go index 419886a80..9cbc09290 100755 --- a/cmd/account/private-access/private-access.go +++ b/cmd/account/private-access/private-access.go @@ -3,6 +3,8 @@ package private_access import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -156,16 +158,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PRIVATE_ACCESS_SETTINGS_ID argument specified. Loading names for Private Access drop-down." + names, err := a.PrivateAccess.PrivateAccessSettingsPrivateAccessSettingsNameToPrivateAccessSettingsIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Private Access drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API private access settings ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api private access settings id") + } deleteReq.PrivateAccessSettingsId = args[0] err = a.PrivateAccess.Delete(ctx, deleteReq) @@ -224,16 +238,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PRIVATE_ACCESS_SETTINGS_ID argument specified. Loading names for Private Access drop-down." + names, err := a.PrivateAccess.PrivateAccessSettingsPrivateAccessSettingsNameToPrivateAccessSettingsIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Private Access drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API private access settings ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api private access settings id") + } getReq.PrivateAccessSettingsId = args[0] response, err := a.PrivateAccess.Get(ctx, getReq) diff --git a/cmd/account/service-principals/service-principals.go b/cmd/account/service-principals/service-principals.go index 7ab354337..481af9789 100755 --- a/cmd/account/service-principals/service-principals.go +++ b/cmd/account/service-principals/service-principals.go @@ -3,6 +3,8 @@ package service_principals import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -143,16 +145,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." + names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks account") + } deleteReq.Id = args[0] err = a.ServicePrincipals.Delete(ctx, deleteReq) @@ -205,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." + names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks account") + } getReq.Id = args[0] response, err := a.ServicePrincipals.Get(ctx, getReq) @@ -352,11 +378,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -368,6 +389,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." + names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks account") + } patchReq.Id = args[0] err = a.ServicePrincipals.Patch(ctx, patchReq) @@ -432,14 +470,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -451,6 +481,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Service Principals drop-down." + names, err := a.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListAccountServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks service principal ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks service principal id") + } updateReq.Id = args[0] } diff --git a/cmd/account/storage/storage.go b/cmd/account/storage/storage.go index 19240ccba..8eebbab1d 100755 --- a/cmd/account/storage/storage.go +++ b/cmd/account/storage/storage.go @@ -139,16 +139,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No STORAGE_CONFIGURATION_ID argument specified. Loading names for Storage drop-down." + names, err := a.Storage.StorageConfigurationStorageConfigurationNameToStorageConfigurationIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Storage drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API storage configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api storage configuration id") + } deleteReq.StorageConfigurationId = args[0] err = a.Storage.Delete(ctx, deleteReq) @@ -200,16 +212,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No STORAGE_CONFIGURATION_ID argument specified. Loading names for Storage drop-down." + names, err := a.Storage.StorageConfigurationStorageConfigurationNameToStorageConfigurationIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Storage drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks Account API storage configuration ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks account api storage configuration id") + } getReq.StorageConfigurationId = args[0] response, err := a.Storage.Get(ctx, getReq) diff --git a/cmd/account/users/users.go b/cmd/account/users/users.go index 117fe26ce..7e84f90f7 100755 --- a/cmd/account/users/users.go +++ b/cmd/account/users/users.go @@ -3,6 +3,8 @@ package users import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -152,16 +154,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." + names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks account") + } deleteReq.Id = args[0] err = a.Users.Delete(ctx, deleteReq) @@ -213,16 +227,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." + names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks account") + } getReq.Id = args[0] response, err := a.Users.Get(ctx, getReq) @@ -360,11 +386,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -376,6 +397,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. 
Loading names for Account Users drop-down." + names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks account") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks account") + } patchReq.Id = args[0] err = a.Users.Patch(ctx, patchReq) @@ -440,14 +478,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -459,6 +489,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Account Users drop-down." + names, err := a.Users.UserUserNameToIdMap(ctx, iam.ListAccountUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Account Users drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks user ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks user id") + } updateReq.Id = args[0] } diff --git a/cmd/account/vpc-endpoints/vpc-endpoints.go b/cmd/account/vpc-endpoints/vpc-endpoints.go index d9c0f6664..5112b48d7 100755 --- a/cmd/account/vpc-endpoints/vpc-endpoints.go +++ b/cmd/account/vpc-endpoints/vpc-endpoints.go @@ -3,6 +3,8 @@ package vpc_endpoints import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -154,16 +156,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No VPC_ENDPOINT_ID argument specified. Loading names for Vpc Endpoints drop-down." + names, err := a.VpcEndpoints.VpcEndpointVpcEndpointNameToVpcEndpointIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Vpc Endpoints drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks VPC endpoint ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks vpc endpoint id") + } deleteReq.VpcEndpointId = args[0] err = a.VpcEndpoints.Delete(ctx, deleteReq) @@ -219,16 +233,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No VPC_ENDPOINT_ID argument specified. Loading names for Vpc Endpoints drop-down." + names, err := a.VpcEndpoints.VpcEndpointVpcEndpointNameToVpcEndpointIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Vpc Endpoints drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks VPC endpoint ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks vpc endpoint id") + } getReq.VpcEndpointId = args[0] response, err := a.VpcEndpoints.Get(ctx, getReq) diff --git a/cmd/account/workspaces/workspaces.go b/cmd/account/workspaces/workspaces.go index 9edf17994..96ac33b69 100755 --- a/cmd/account/workspaces/workspaces.go +++ b/cmd/account/workspaces/workspaces.go @@ -185,16 +185,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." + names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Workspace ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have workspace id") + } _, err = fmt.Sscan(args[0], &deleteReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -262,16 +274,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." + names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Workspace ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have workspace id") + } _, err = fmt.Sscan(args[0], &getReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) @@ -500,16 +524,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustAccountClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." 
+ names, err := a.Workspaces.WorkspaceWorkspaceNameToWorkspaceIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspaces drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Workspace ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have workspace id") + } _, err = fmt.Sscan(args[0], &updateReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) diff --git a/cmd/workspace/alerts/alerts.go b/cmd/workspace/alerts/alerts.go index b96e240a6..7c98f7ee8 100755 --- a/cmd/workspace/alerts/alerts.go +++ b/cmd/workspace/alerts/alerts.go @@ -135,16 +135,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down." + names, err := w.Alerts.AlertNameToIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } deleteReq.AlertId = args[0] err = w.Alerts.Delete(ctx, deleteReq) @@ -196,16 +208,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ALERT_ID argument specified. Loading names for Alerts drop-down." + names, err := w.Alerts.AlertNameToIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Alerts drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } getReq.AlertId = args[0] response, err := w.Alerts.Get(ctx, getReq) diff --git a/cmd/workspace/cluster-policies/cluster-policies.go b/cmd/workspace/cluster-policies/cluster-policies.go index c4f00e52b..74a092cdb 100755 --- a/cmd/workspace/cluster-policies/cluster-policies.go +++ b/cmd/workspace/cluster-policies/cluster-policies.go @@ -3,6 +3,8 @@ package cluster_policies import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -162,14 +164,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - 
cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -181,6 +175,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the policy to delete") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the policy to delete") + } deleteReq.PolicyId = args[0] } @@ -314,16 +325,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Canonical unique identifier for the cluster policy") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have canonical unique identifier for the cluster policy") + } getReq.PolicyId = args[0] response, err := w.ClusterPolicies.Get(ctx, getReq) diff --git a/cmd/workspace/clusters/clusters.go b/cmd/workspace/clusters/clusters.go index 432fbff4d..861730632 100755 --- a/cmd/workspace/clusters/clusters.go +++ b/cmd/workspace/clusters/clusters.go @@ -3,6 +3,7 @@ package clusters import ( + "fmt" "time" "github.com/databricks/cli/cmd/root" @@ -292,14 +293,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -311,6 +304,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster to be terminated") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster to be terminated") + } deleteReq.ClusterId = args[0] } @@ -516,14 +526,6 @@ func newEvents() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -535,6 +537,23 @@ func newEvents() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the cluster to retrieve events about") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the cluster to retrieve events about") + } eventsReq.ClusterId = args[0] } @@ -593,16 +612,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster about which to retrieve information") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster about which to retrieve information") + } getReq.ClusterId = args[0] response, err := w.Clusters.Get(ctx, getReq) @@ -841,14 +872,6 @@ func newPermanentDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -860,6 +883,23 @@ func newPermanentDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster to be deleted") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster to be deleted") + } permanentDeleteReq.ClusterId = args[0] } @@ -916,14 +956,6 @@ func newPin() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -935,6 +967,23 @@ func newPin() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } pinReq.ClusterId = args[0] } @@ -998,14 +1047,6 @@ func newResize() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1017,6 +1058,23 @@ func newResize() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster to be resized") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster to be resized") + } resizeReq.ClusterId = args[0] } @@ -1091,14 +1149,6 @@ func newRestart() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1110,6 +1160,23 @@ func newRestart() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster to be started") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster to be started") + } restartReq.ClusterId = args[0] } @@ -1237,14 +1304,6 @@ func newStart() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1256,6 +1315,23 @@ func newStart() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster to be started") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster to be started") + } startReq.ClusterId = args[0] } @@ -1324,14 +1400,6 @@ func newUnpin() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1343,6 +1411,23 @@ func newUnpin() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } unpinReq.ClusterId = args[0] } diff --git a/cmd/workspace/connections/connections.go b/cmd/workspace/connections/connections.go index 89636b594..966c5666c 100755 --- a/cmd/workspace/connections/connections.go +++ b/cmd/workspace/connections/connections.go @@ -146,16 +146,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME_ARG argument specified. Loading names for Connections drop-down." + names, err := w.Connections.ConnectionInfoNameToFullNameMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Connections drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The name of the connection to be deleted") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the name of the connection to be deleted") + } deleteReq.NameArg = args[0] err = w.Connections.Delete(ctx, deleteReq) @@ -207,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME_ARG argument specified. Loading names for Connections drop-down." + names, err := w.Connections.ConnectionInfoNameToFullNameMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Connections drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the connection") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the connection") + } getReq.NameArg = args[0] response, err := w.Connections.Get(ctx, getReq) diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index 03796c2f2..3c48dc1b7 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -136,16 +136,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." + names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } deleteReq.DashboardId = args[0] err = w.Dashboards.Delete(ctx, deleteReq) @@ -198,16 +210,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." + names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } getReq.DashboardId = args[0] response, err := w.Dashboards.Get(ctx, getReq) @@ -336,16 +360,28 @@ func newRestore() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No DASHBOARD_ID argument specified. Loading names for Dashboards drop-down." 
+ names, err := w.Dashboards.DashboardNameToIdMap(ctx, sql.ListDashboardsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Dashboards drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } restoreReq.DashboardId = args[0] err = w.Dashboards.Restore(ctx, restoreReq) diff --git a/cmd/workspace/functions/functions.go b/cmd/workspace/functions/functions.go index 8b4a50ec5..02c8531db 100755 --- a/cmd/workspace/functions/functions.go +++ b/cmd/workspace/functions/functions.go @@ -146,16 +146,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." + names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + } deleteReq.Name = args[0] err = w.Functions.Delete(ctx, deleteReq) @@ -213,16 +225,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." + names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + } getReq.Name = args[0] response, err := w.Functions.Get(ctx, getReq) @@ -351,16 +375,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Functions drop-down." + names, err := w.Functions.FunctionInfoNameToFullNameMap(ctx, catalog.ListFunctionsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Functions drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the fully-qualified name of the function (of the form __catalog_name__.__schema_name__.__function__name__)") + } updateReq.Name = args[0] response, err := w.Functions.Update(ctx, updateReq) diff --git a/cmd/workspace/git-credentials/git-credentials.go b/cmd/workspace/git-credentials/git-credentials.go index 7e61b4c4f..8d5c59ed8 100755 --- a/cmd/workspace/git-credentials/git-credentials.go +++ b/cmd/workspace/git-credentials/git-credentials.go @@ -143,16 +143,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." + names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding credential to access") + } _, err = fmt.Sscan(args[0], &deleteReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) @@ -207,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." + names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding credential to access") + } _, err = fmt.Sscan(args[0], &getReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) @@ -324,16 +348,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CREDENTIAL_ID argument specified. Loading names for Git Credentials drop-down." + names, err := w.GitCredentials.CredentialInfoGitProviderToCredentialIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Git Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding credential to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding credential to access") + } _, err = fmt.Sscan(args[0], &updateReq.CredentialId) if err != nil { return fmt.Errorf("invalid CREDENTIAL_ID: %s", args[0]) diff --git a/cmd/workspace/global-init-scripts/global-init-scripts.go b/cmd/workspace/global-init-scripts/global-init-scripts.go index e7d734f06..12c49a513 100755 --- a/cmd/workspace/global-init-scripts/global-init-scripts.go +++ b/cmd/workspace/global-init-scripts/global-init-scripts.go @@ -3,6 +3,8 @@ package global_init_scripts import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -142,16 +144,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No SCRIPT_ID argument specified. Loading names for Global Init Scripts drop-down." + names, err := w.GlobalInitScripts.GlobalInitScriptDetailsNameToScriptIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Global Init Scripts drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the global init script") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the global init script") + } deleteReq.ScriptId = args[0] err = w.GlobalInitScripts.Delete(ctx, deleteReq) @@ -203,16 +217,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No SCRIPT_ID argument specified. Loading names for Global Init Scripts drop-down." + names, err := w.GlobalInitScripts.GlobalInitScriptDetailsNameToScriptIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Global Init Scripts drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the global init script") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the global init script") + } getReq.ScriptId = args[0] response, err := w.GlobalInitScripts.Get(ctx, getReq) diff --git a/cmd/workspace/groups/groups.go b/cmd/workspace/groups/groups.go index 0ef9a2696..48a9c9c68 100755 --- a/cmd/workspace/groups/groups.go +++ b/cmd/workspace/groups/groups.go @@ -3,6 +3,8 @@ package groups import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -145,16 +147,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." + names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks workspace") + } deleteReq.Id = args[0] err = w.Groups.Delete(ctx, deleteReq) @@ -206,16 +220,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." + names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks workspace") + } getReq.Id = args[0] response, err := w.Groups.Get(ctx, getReq) @@ -352,11 +378,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -368,6 +389,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. 
Loading names for Groups drop-down." + names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a group in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a group in the databricks workspace") + } patchReq.Id = args[0] err = w.Groups.Patch(ctx, patchReq) @@ -430,14 +468,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -449,6 +479,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Groups drop-down." + names, err := w.Groups.GroupDisplayNameToIdMap(ctx, iam.ListGroupsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Groups drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks group ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks group id") + } updateReq.Id = args[0] } diff --git a/cmd/workspace/instance-pools/instance-pools.go b/cmd/workspace/instance-pools/instance-pools.go index e1e3cd21d..2a95437fb 100755 --- a/cmd/workspace/instance-pools/instance-pools.go +++ b/cmd/workspace/instance-pools/instance-pools.go @@ -3,6 +3,8 @@ package instance_pools import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -164,14 +166,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -183,6 +177,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool to be terminated") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool to be terminated") + } deleteReq.InstancePoolId = args[0] } @@ -323,16 +334,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical unique identifier for the instance pool") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical unique identifier for the instance pool") + } getReq.InstancePoolId = args[0] response, err := w.InstancePools.Get(ctx, getReq) diff --git a/cmd/workspace/ip-access-lists/ip-access-lists.go b/cmd/workspace/ip-access-lists/ip-access-lists.go index 081cb385c..7bda0ef02 100755 --- a/cmd/workspace/ip-access-lists/ip-access-lists.go +++ b/cmd/workspace/ip-access-lists/ip-access-lists.go @@ -159,16 +159,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Ip Access Lists drop-down." + names, err := w.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list to modify") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding ip access list to modify") + } deleteReq.IpAccessListId = args[0] err = w.IpAccessLists.Delete(ctx, deleteReq) @@ -220,16 +232,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No IP_ACCESS_LIST_ID argument specified. Loading names for Ip Access Lists drop-down." + names, err := w.IpAccessLists.IpAccessListInfoLabelToListIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Ip Access Lists drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding IP access list to modify") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding ip access list to modify") + } getReq.IpAccessListId = args[0] response, err := w.IpAccessLists.Get(ctx, getReq) diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index 49d7edbd1..640f40016 100755 --- a/cmd/workspace/jobs/jobs.go +++ b/cmd/workspace/jobs/jobs.go @@ -80,14 +80,6 @@ func newCancelAllRuns() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -99,6 +91,23 @@ func newCancelAllRuns() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to cancel all runs of") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the job to cancel all runs of") + } _, err = fmt.Sscan(args[0], &cancelAllRunsReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -162,14 +171,6 @@ func newCancelRun() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -181,6 +182,23 @@ func newCancelRun() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "This field is required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have this field is required") + } _, err = fmt.Sscan(args[0], &cancelRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -330,14 +348,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -349,6 +359,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to delete") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the job to delete") + } _, err = fmt.Sscan(args[0], &deleteReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -406,14 +433,6 @@ func newDeleteRun() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -425,6 +444,23 @@ func newDeleteRun() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the run for which to retrieve the metadata") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the run for which to retrieve the metadata") + } _, err = fmt.Sscan(args[0], &deleteRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -482,16 +518,28 @@ func newExportRun() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier for the run") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier for the run") + } _, err = fmt.Sscan(args[0], &exportRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -546,16 +594,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to retrieve information about") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the job to retrieve information about") + } _, err = fmt.Sscan(args[0], &getReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -617,16 +677,28 @@ func newGetRun() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the run for which to retrieve the metadata") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the run for which to retrieve the metadata") + } _, err = fmt.Sscan(args[0], &getRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -690,16 +762,28 @@ func newGetRunOutput() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier for the run") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier for the run") + } _, err = fmt.Sscan(args[0], &getRunOutputReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -937,14 +1021,6 @@ func newRepairRun() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -956,6 +1032,23 @@ func newRepairRun() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No RUN_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job run ID of the run to repair") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job run id of the run to repair") + } _, err = fmt.Sscan(args[0], &repairRunReq.RunId) if err != nil { return fmt.Errorf("invalid RUN_ID: %s", args[0]) @@ -1114,14 +1207,6 @@ func newRunNow() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1133,6 +1218,23 @@ func newRunNow() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the job to be executed") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the job to be executed") + } _, err = fmt.Sscan(args[0], &runNowReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) @@ -1323,14 +1425,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -1342,6 +1436,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The canonical identifier of the job to update") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the canonical identifier of the job to update") + } _, err = fmt.Sscan(args[0], &updateReq.JobId) if err != nil { return fmt.Errorf("invalid JOB_ID: %s", args[0]) diff --git a/cmd/workspace/metastores/metastores.go b/cmd/workspace/metastores/metastores.go index 4bed9fd17..274869884 100755 --- a/cmd/workspace/metastores/metastores.go +++ b/cmd/workspace/metastores/metastores.go @@ -265,16 +265,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." + names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id of the metastore") + } deleteReq.Id = args[0] err = w.Metastores.Delete(ctx, deleteReq) @@ -407,16 +419,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." + names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id of the metastore") + } getReq.Id = args[0] response, err := w.Metastores.Get(ctx, getReq) @@ -641,16 +665,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Metastores drop-down." 
+ names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID of the metastore") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id of the metastore") + } updateReq.Id = args[0] response, err := w.Metastores.Update(ctx, updateReq) @@ -708,16 +744,28 @@ func newUpdateAssignment() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Metastores drop-down." + names, err := w.Metastores.MetastoreInfoNameToMetastoreIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Metastores drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "A workspace ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have a workspace id") + } _, err = fmt.Sscan(args[0], &updateAssignmentReq.WorkspaceId) if err != nil { return fmt.Errorf("invalid WORKSPACE_ID: %s", args[0]) diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 652af8987..708343b2e 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -147,16 +147,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } deleteReq.PipelineId = args[0] err = w.Pipelines.Delete(ctx, deleteReq) @@ -211,16 +223,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } getReq.PipelineId = args[0] response, err := w.Pipelines.Get(ctx, getReq) @@ -341,11 +365,6 @@ func newListPipelineEvents() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -357,6 +376,23 @@ func newListPipelineEvents() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." 
+ names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } listPipelineEventsReq.PipelineId = args[0] response, err := w.Pipelines.ListPipelineEventsAll(ctx, listPipelineEventsReq) @@ -489,16 +525,28 @@ func newListUpdates() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline to return updates for") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline to return updates for") + } listUpdatesReq.PipelineId = args[0] response, err := w.Pipelines.ListUpdates(ctx, listUpdatesReq) @@ -555,16 +603,28 @@ func newReset() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } resetReq.PipelineId = args[0] wait, err := w.Pipelines.Reset(ctx, resetReq) @@ -635,11 +695,6 @@ func newStartUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -651,6 +706,23 @@ func newStartUpdate() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. 
Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } startUpdateReq.PipelineId = args[0] response, err := w.Pipelines.StartUpdate(ctx, startUpdateReq) @@ -707,16 +779,28 @@ func newStop() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } stopReq.PipelineId = args[0] wait, err := w.Pipelines.Stop(ctx, stopReq) @@ -802,14 +886,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -821,6 +897,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique identifier for this pipeline") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique identifier for this pipeline") + } updateReq.PipelineId = args[0] } diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index e5a41e128..e893f3303 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -141,16 +141,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." + names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the provider") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the provider") + } deleteReq.Name = args[0] err = w.Providers.Delete(ctx, deleteReq) @@ -204,16 +216,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." + names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the provider") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the provider") + } getReq.Name = args[0] response, err := w.Providers.Get(ctx, getReq) @@ -344,16 +368,28 @@ func newListShares() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." 
+ names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the provider in which to list shares") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the provider in which to list shares") + } listSharesReq.Name = args[0] response, err := w.Providers.ListSharesAll(ctx, listSharesReq) @@ -415,14 +451,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -434,6 +462,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Providers drop-down." + names, err := w.Providers.ProviderInfoNameToMetastoreIdMap(ctx, sharing.ListProvidersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Providers drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The name of the Provider") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the name of the provider") + } updateReq.Name = args[0] } diff --git a/cmd/workspace/queries/queries.go b/cmd/workspace/queries/queries.go index b1c94ddcd..3512adaaf 100755 --- a/cmd/workspace/queries/queries.go +++ b/cmd/workspace/queries/queries.go @@ -145,16 +145,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." + names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } deleteReq.QueryId = args[0] err = w.Queries.Delete(ctx, deleteReq) @@ -207,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No QUERY_ID argument specified. 
Loading names for Queries drop-down." + names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } getReq.QueryId = args[0] response, err := w.Queries.Get(ctx, getReq) @@ -347,16 +371,28 @@ func newRestore() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." + names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } restoreReq.QueryId = args[0] err = w.Queries.Restore(ctx, restoreReq) @@ -418,11 +454,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -434,6 +465,23 @@ func newUpdate() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No QUERY_ID argument specified. Loading names for Queries drop-down." + names, err := w.Queries.QueryNameToIdMap(ctx, sql.ListQueriesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Queries drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have ") + } updateReq.QueryId = args[0] response, err := w.Queries.Update(ctx, updateReq) diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index 10430cdf2..c96e5fc59 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -146,16 +146,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." + names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the recipient") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the recipient") + } deleteReq.Name = args[0] err = w.Recipients.Delete(ctx, deleteReq) @@ -209,16 +221,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." + names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the recipient") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the recipient") + } getReq.Name = args[0] response, err := w.Recipients.Get(ctx, getReq) @@ -414,16 +438,28 @@ func newSharePermissions() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." 
+ names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The name of the Recipient") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the name of the recipient") + } sharePermissionsReq.Name = args[0] response, err := w.Recipients.SharePermissions(ctx, sharePermissionsReq) @@ -485,14 +521,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -504,6 +532,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Recipients drop-down." + names, err := w.Recipients.RecipientInfoNameToMetastoreIdMap(ctx, sharing.ListRecipientsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Recipients drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of Recipient") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of recipient") + } updateReq.Name = args[0] } diff --git a/cmd/workspace/repos/repos.go b/cmd/workspace/repos/repos.go index 087a62449..b1e003717 100755 --- a/cmd/workspace/repos/repos.go +++ b/cmd/workspace/repos/repos.go @@ -147,16 +147,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding repo to access") + } _, err = fmt.Sscan(args[0], &deleteReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) @@ -211,16 +223,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding repo to access") + } _, err = fmt.Sscan(args[0], &getReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) @@ -358,11 +382,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -374,6 +393,23 @@ func newUpdate() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID for the corresponding repo to access") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id for the corresponding repo to access") + } _, err = fmt.Sscan(args[0], &updateReq.RepoId) if err != nil { return fmt.Errorf("invalid REPO_ID: %s", args[0]) diff --git a/cmd/workspace/schemas/schemas.go b/cmd/workspace/schemas/schemas.go index e1ad7be4c..fddf986de 100755 --- a/cmd/workspace/schemas/schemas.go +++ b/cmd/workspace/schemas/schemas.go @@ -3,6 +3,8 @@ package schemas import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -142,16 +144,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." + names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the schema") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the schema") + } deleteReq.FullName = args[0] err = w.Schemas.Delete(ctx, deleteReq) @@ -205,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." + names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the schema") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the schema") + } getReq.FullName = args[0] response, err := w.Schemas.Get(ctx, getReq) @@ -342,11 +368,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -358,6 +379,23 @@ func newUpdate() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Schemas drop-down." 
+ names, err := w.Schemas.SchemaInfoNameToFullNameMap(ctx, catalog.ListSchemasRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Schemas drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the schema") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the schema") + } updateReq.FullName = args[0] response, err := w.Schemas.Update(ctx, updateReq) diff --git a/cmd/workspace/service-principals/service-principals.go b/cmd/workspace/service-principals/service-principals.go index 787ca29ef..f30a92d4b 100755 --- a/cmd/workspace/service-principals/service-principals.go +++ b/cmd/workspace/service-principals/service-principals.go @@ -3,6 +3,8 @@ package service_principals import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -143,16 +145,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." + names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") + } deleteReq.Id = args[0] err = w.ServicePrincipals.Delete(ctx, deleteReq) @@ -205,16 +219,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." + names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") + } getReq.Id = args[0] response, err := w.ServicePrincipals.Get(ctx, getReq) @@ -352,11 +378,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -368,6 +389,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." + names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a service principal in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a service principal in the databricks workspace") + } patchReq.Id = args[0] err = w.ServicePrincipals.Patch(ctx, patchReq) @@ -432,14 +470,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -451,6 +481,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Service Principals drop-down." + names, err := w.ServicePrincipals.ServicePrincipalDisplayNameToIdMap(ctx, iam.ListServicePrincipalsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Service Principals drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks service principal ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks service principal id") + } updateReq.Id = args[0] } diff --git a/cmd/workspace/storage-credentials/storage-credentials.go b/cmd/workspace/storage-credentials/storage-credentials.go index 337fddcfe..b5dd5141b 100755 --- a/cmd/workspace/storage-credentials/storage-credentials.go +++ b/cmd/workspace/storage-credentials/storage-credentials.go @@ -3,6 +3,8 @@ package storage_credentials import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -160,16 +162,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." + names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the storage credential") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the storage credential") + } deleteReq.Name = args[0] err = w.StorageCredentials.Delete(ctx, deleteReq) @@ -223,16 +237,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." + names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of the storage credential") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of the storage credential") + } getReq.Name = args[0] response, err := w.StorageCredentials.Get(ctx, getReq) @@ -351,14 +377,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -370,6 +388,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No NAME argument specified. Loading names for Storage Credentials drop-down." + names, err := w.StorageCredentials.StorageCredentialInfoNameToIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Storage Credentials drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The credential name") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the credential name") + } updateReq.Name = args[0] } diff --git a/cmd/workspace/tables/tables.go b/cmd/workspace/tables/tables.go index b7b45de46..53a153fcf 100755 --- a/cmd/workspace/tables/tables.go +++ b/cmd/workspace/tables/tables.go @@ -3,6 +3,8 @@ package tables import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/databricks-sdk-go/service/catalog" @@ -68,16 +70,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." + names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the table") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the table") + } deleteReq.FullName = args[0] err = w.Tables.Delete(ctx, deleteReq) @@ -135,16 +149,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." + names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the table") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the table") + } getReq.FullName = args[0] response, err := w.Tables.Get(ctx, getReq) @@ -282,16 +308,28 @@ func newListSummaries() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CATALOG_NAME argument specified. Loading names for Tables drop-down." 
+ names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Name of parent catalog for tables of interest") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have name of parent catalog for tables of interest") + } listSummariesReq.CatalogName = args[0] response, err := w.Tables.ListSummariesAll(ctx, listSummariesReq) @@ -352,16 +390,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Tables drop-down." + names, err := w.Tables.TableInfoNameToTableIdMap(ctx, catalog.ListTablesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tables drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Full name of the table") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have full name of the table") + } updateReq.FullName = args[0] err = w.Tables.Update(ctx, updateReq) diff --git a/cmd/workspace/token-management/token-management.go b/cmd/workspace/token-management/token-management.go index afd8fdb9e..dcee2f0ee 100755 --- a/cmd/workspace/token-management/token-management.go +++ b/cmd/workspace/token-management/token-management.go @@ -140,16 +140,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No TOKEN_ID argument specified. Loading names for Token Management drop-down." + names, err := w.TokenManagement.TokenInfoCommentToTokenIdMap(ctx, settings.ListTokenManagementRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Token Management drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the token to get") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the token to get") + } deleteReq.TokenId = args[0] err = w.TokenManagement.Delete(ctx, deleteReq) @@ -201,16 +213,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No TOKEN_ID argument specified. Loading names for Token Management drop-down." + names, err := w.TokenManagement.TokenInfoCommentToTokenIdMap(ctx, settings.ListTokenManagementRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Token Management drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the token to get") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the token to get") + } getReq.TokenId = args[0] response, err := w.TokenManagement.Get(ctx, getReq) diff --git a/cmd/workspace/tokens/tokens.go b/cmd/workspace/tokens/tokens.go index 1e6ea7141..eee64c976 100755 --- a/cmd/workspace/tokens/tokens.go +++ b/cmd/workspace/tokens/tokens.go @@ -3,6 +3,8 @@ package tokens import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -141,14 +143,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -160,6 +154,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No TOKEN_ID argument specified. Loading names for Tokens drop-down." + names, err := w.Tokens.TokenInfoCommentToTokenIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Tokens drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The ID of the token to be revoked") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the id of the token to be revoked") + } deleteReq.TokenId = args[0] } diff --git a/cmd/workspace/users/users.go b/cmd/workspace/users/users.go index e81beb02f..2dfbf6e86 100755 --- a/cmd/workspace/users/users.go +++ b/cmd/workspace/users/users.go @@ -3,6 +3,8 @@ package users import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -152,16 +154,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Users drop-down." + names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks workspace") + } deleteReq.Id = args[0] err = w.Users.Delete(ctx, deleteReq) @@ -213,16 +227,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Users drop-down." + names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks workspace") + } getReq.Id = args[0] response, err := w.Users.Get(ctx, getReq) @@ -360,11 +386,6 @@ func newPatch() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -376,6 +397,23 @@ func newPatch() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. 
Loading names for Users drop-down." + names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Unique ID for a user in the Databricks workspace") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have unique id for a user in the databricks workspace") + } patchReq.Id = args[0] err = w.Users.Patch(ctx, patchReq) @@ -440,14 +478,6 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -459,6 +489,23 @@ func newUpdate() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Users drop-down." + names, err := w.Users.UserUserNameToIdMap(ctx, iam.ListUsersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Users drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Databricks user ID") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have databricks user id") + } updateReq.Id = args[0] } diff --git a/cmd/workspace/volumes/volumes.go b/cmd/workspace/volumes/volumes.go index 72c1ff7c4..2d2026820 100755 --- a/cmd/workspace/volumes/volumes.go +++ b/cmd/workspace/volumes/volumes.go @@ -172,16 +172,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." + names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") + } deleteReq.FullNameArg = args[0] err = w.Volumes.Delete(ctx, deleteReq) @@ -310,16 +322,28 @@ func newRead() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." + names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") + } readReq.FullNameArg = args[0] response, err := w.Volumes.Read(ctx, readReq) @@ -382,16 +406,28 @@ func newUpdate() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME_ARG argument specified. Loading names for Volumes drop-down." + names, err := w.Volumes.VolumeInfoNameToVolumeIdMap(ctx, catalog.ListVolumesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Volumes drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the volume") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the volume") + } updateReq.FullNameArg = args[0] response, err := w.Volumes.Update(ctx, updateReq) diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 1d7dde033..378b931db 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -172,16 +172,28 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have required") + } deleteReq.Id = args[0] err = w.Warehouses.Delete(ctx, deleteReq) @@ -254,11 +266,6 @@ func newEdit() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -270,6 +277,23 @@ func newEdit() *cobra.Command { return err } } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have required") + } editReq.Id = args[0] wait, err := w.Warehouses.Edit(ctx, editReq) @@ -345,16 +369,28 @@ func newGet() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." 
+ names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have required") + } getReq.Id = args[0] response, err := w.Warehouses.Get(ctx, getReq) @@ -617,16 +653,28 @@ func newStart() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have required") + } startReq.Id = args[0] wait, err := w.Warehouses.Start(ctx, startReq) @@ -702,16 +750,28 @@ func newStop() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "Required") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have required") + } stopReq.Id = args[0] wait, err := w.Warehouses.Stop(ctx, stopReq) diff --git a/cmd/workspace/workspace/workspace.go b/cmd/workspace/workspace/workspace.go index 153fffe43..aeca95253 100755 --- a/cmd/workspace/workspace/workspace.go +++ b/cmd/workspace/workspace/workspace.go @@ -3,6 +3,8 @@ package workspace import ( + "fmt" + "github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" @@ -72,14 +74,6 @@ func newDelete() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -91,6 +85,23 @@ func newDelete() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PATH argument specified. Loading names for Workspace drop-down." + names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The absolute path of the notebook or directory") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the absolute path of the notebook or directory") + } deleteReq.Path = args[0] } @@ -152,16 +163,28 @@ func newExport() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() w := root.WorkspaceClient(ctx) + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PATH argument specified. Loading names for Workspace drop-down." + names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The absolute path of the object or directory") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the absolute path of the object or directory") + } exportReq.Path = args[0] response, err := w.Workspace.Export(ctx, exportReq) @@ -428,14 +451,6 @@ func newMkdirs() *cobra.Command { cmd.Annotations = make(map[string]string) - cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(1) - if cmd.Flags().Changed("json") { - check = cobra.ExactArgs(0) - } - return check(cmd, args) - } - cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() @@ -447,6 +462,23 @@ func newMkdirs() *cobra.Command { return err } } else { + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PATH argument specified. Loading names for Workspace drop-down." + names, err := w.Workspace.ObjectInfoPathToObjectIdMap(ctx, workspace.ListWorkspaceRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Workspace drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The absolute path of the directory") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the absolute path of the directory") + } mkdirsReq.Path = args[0] } From e4b66833dd78ad6ab6b0cd71deb7cf0f364b1108 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 27 Jul 2023 15:24:34 +0200 Subject: [PATCH 028/139] Fix mkdir integration test on GCP (#620) GCP returns a different error here incase of a conflict. This PR fixes the test. 
--- internal/fs_mkdir_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/fs_mkdir_test.go b/internal/fs_mkdir_test.go index 137750e28..32d1d9c8b 100644 --- a/internal/fs_mkdir_test.go +++ b/internal/fs_mkdir_test.go @@ -111,7 +111,7 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) - // Different backends return different errors (for example: file in s3 vs dbfs) - regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file`) + // Different cloud providers return different errors. + regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) assert.Regexp(t, regex, err.Error()) } From 2f4bf844fc2465e2f3ca64ddcc6e4588dd2319fd Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 27 Jul 2023 15:51:57 +0200 Subject: [PATCH 029/139] Fix git clone integration test for non-existing repo (#610) ## Changes This PR changes the integration test to just check an error is returned rather than asserting specific text is present in the error. This is required because the error returned can be different based on whether git ssh keys have been setup. 
--- internal/git_clone_test.go | 7 ++++--- libs/git/clone.go | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/internal/git_clone_test.go b/internal/git_clone_test.go index b280ebc7d..3fb69b924 100644 --- a/internal/git_clone_test.go +++ b/internal/git_clone_test.go @@ -53,11 +53,12 @@ func TestAccGitCloneWithOnlyRepoNameOnAlternateBranch(t *testing.T) { assert.Contains(t, string(b), "dais-2022") } -func TestAccGitCloneRepositoryDoesNotExist(t *testing.T) { +func TestAccGitCloneErrorsWhenRepositoryDoesNotExist(t *testing.T) { t.Log(GetEnvOrSkipTest(t, "CLOUD_ENV")) tmpDir := t.TempDir() - err := git.Clone(context.Background(), "doesnot-exist", "", tmpDir) - assert.Contains(t, err.Error(), `repository 'https://github.com/databricks/doesnot-exist/' not found`) + err := git.Clone(context.Background(), "https://github.com/monalisa/doesnot-exist.git", "", tmpDir) + // Expect the error to originate from shelling out to `git clone` + assert.ErrorContains(t, err, "git clone failed:") } diff --git a/libs/git/clone.go b/libs/git/clone.go index ec663272d..8b075cdea 100644 --- a/libs/git/clone.go +++ b/libs/git/clone.go @@ -60,7 +60,7 @@ func Clone(ctx context.Context, url, reference, targetPath string) error { return fmt.Errorf("please install git CLI to clone a repository: %w", err) } if err != nil { - return err + return fmt.Errorf("git clone failed: %w", err) } // wait for git clone to complete From 28bfc0c73df282decf1764ef0ec0b320336555ec Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 27 Jul 2023 16:19:40 +0200 Subject: [PATCH 030/139] Remove push to main trigger for build workflow (#621) ## Changes Commits going through the merge queue are tested there using their final SHA as if they were already in main. The push-to-main trigger therefore duplicates the builds that were already triggered from the merge queue. 
## Tests ![Screenshot 2023-07-27 at 15 37 17](https://github.com/databricks/cli/assets/9845/ff7af5dd-0d2c-48c2-89b2-7ecf3d121071) --- .github/workflows/push.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 1dcf3eaf3..c47cfc726 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -3,8 +3,6 @@ name: build on: pull_request: types: [opened, synchronize] - push: - branches: [main] merge_group: types: [checks_requested] From 5deca8d7d85b1a3696979c2fb9c675783b198af2 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 27 Jul 2023 16:19:50 +0200 Subject: [PATCH 031/139] Remove workflow to publish binaries to S3 (#622) ## Changes Binaries are published through GitHub releases now. --- .github/workflows/publish-latest.yml | 29 ---------------------------- .github/workflows/release.yml | 5 ----- 2 files changed, 34 deletions(-) delete mode 100644 .github/workflows/publish-latest.yml diff --git a/.github/workflows/publish-latest.yml b/.github/workflows/publish-latest.yml deleted file mode 100644 index 921edfd39..000000000 --- a/.github/workflows/publish-latest.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: publish-latest - -on: - workflow_dispatch: - - workflow_call: - -jobs: - publish: - runs-on: ubuntu-22.04 - - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - ref: release-s3 - - - name: Install s3cmd - run: | - sudo apt-get update - sudo apt-get install s3cmd - - - name: Publish to S3 - working-directory: ./scripts - run: ./publish_to_s3.sh - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index cb4847cae..c1ecef011 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -46,8 +46,3 @@ jobs: args: release env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - 
publish: - uses: ./.github/workflows/publish-latest.yml - needs: goreleaser - secrets: inherit From d55652be07071e119a9f9ba498a01c1555aaaff2 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Sun, 30 Jul 2023 09:19:49 +0200 Subject: [PATCH 032/139] Extend deployment mode support (#577) ## Changes This adds `mode: production` option. This mode doesn't do any transformations but verifies that an environment is configured correctly for production: ``` environments: prod: mode: production # paths should not be scoped to a user (unless a service principal is used) root_path: /Shared/non_user_path/... # run_as and permissions should be set at the resource level (or at the top level when that is implemented) run_as: user_name: Alice permissions: - level: CAN_MANAGE user_name: Alice ``` Additionally, this extends the existing `mode: development` option, * now prefixing deployed assets with `[dev your.user]` instead of just `[dev`] * validating that development deployments _are_ scoped to a user ## Related https://github.com/databricks/cli/pull/578/files (in draft) ## Tests Manual testing to validate the experience, error messages, and functionality with all resource types. Automated unit tests. 
--------- Co-authored-by: Fabian Jakobs --- bundle/config/environment.go | 10 +- .../mutator/expand_workspace_root_test.go | 18 +- .../config/mutator/populate_current_user.go | 21 ++- .../mutator/populate_current_user_test.go | 39 ++++- .../mutator/process_environment_mode.go | 113 ++++++++++++- .../mutator/process_environment_mode_test.go | 154 ++++++++++++++---- bundle/config/workspace.go | 9 +- bundle/deploy/files/sync.go | 7 +- bundle/tests/job_and_pipeline/databricks.yml | 1 + internal/sync_test.go | 10 +- libs/sync/path.go | 14 +- libs/sync/sync.go | 5 +- 12 files changed, 340 insertions(+), 61 deletions(-) diff --git a/bundle/config/environment.go b/bundle/config/environment.go index 06a8d8909..c1f4f4ad7 100644 --- a/bundle/config/environment.go +++ b/bundle/config/environment.go @@ -32,7 +32,13 @@ type Environment struct { } const ( - // Right now, we just have a default / "" mode and a "development" mode. - // Additional modes are expected to come for pull-requests and production. + // Development mode: deployments done purely for running things in development. + // Any deployed resources will be marked as "dev" and might be hidden or cleaned up. Development Mode = "development" + + // Production mode: deployments done for production purposes. + // Any deployed resources will not be changed but this mode will enable + // various strictness checks to make sure that a deployment is correctly setup + // for production purposes. 
+ Production Mode = "production" ) diff --git a/bundle/config/mutator/expand_workspace_root_test.go b/bundle/config/mutator/expand_workspace_root_test.go index e872dc835..0ec11a07d 100644 --- a/bundle/config/mutator/expand_workspace_root_test.go +++ b/bundle/config/mutator/expand_workspace_root_test.go @@ -16,8 +16,10 @@ func TestExpandWorkspaceRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, RootPath: "~/foo", }, @@ -32,8 +34,10 @@ func TestExpandWorkspaceRootDoesNothing(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, RootPath: "/Users/charly@doe.com/foo", }, @@ -48,8 +52,10 @@ func TestExpandWorkspaceRootWithoutRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ - CurrentUser: &iam.User{ - UserName: "jane@doe.com", + CurrentUser: &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, }, }, }, diff --git a/bundle/config/mutator/populate_current_user.go b/bundle/config/mutator/populate_current_user.go index 34c6ff6e3..cbaa2d30b 100644 --- a/bundle/config/mutator/populate_current_user.go +++ b/bundle/config/mutator/populate_current_user.go @@ -2,8 +2,11 @@ package mutator import ( "context" + "strings" + "unicode" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" ) type populateCurrentUser struct{} @@ -24,6 +27,22 @@ func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error return err } - b.Config.Workspace.CurrentUser = me + b.Config.Workspace.CurrentUser = &config.User{ + ShortName: getShortUserName(me.UserName), + User: me, + } return nil } + +// Get a short-form username, based on the user's 
primary email address. +// We leave the full range of unicode letters in tact, but remove all "special" characters, +// including dots, which are not supported in e.g. experiment names. +func getShortUserName(emailAddress string) string { + r := []rune(strings.Split(emailAddress, "@")[0]) + for i := 0; i < len(r); i++ { + if !unicode.IsLetter(r[i]) { + r[i] = '_' + } + } + return string(r) +} diff --git a/bundle/config/mutator/populate_current_user_test.go b/bundle/config/mutator/populate_current_user_test.go index 4c28d1cd3..79ec52b8f 100644 --- a/bundle/config/mutator/populate_current_user_test.go +++ b/bundle/config/mutator/populate_current_user_test.go @@ -1,3 +1,40 @@ package mutator -// We need to implement workspace client mocking to implement this test. +import "testing" + +func TestPopulateCurrentUser(t *testing.T) { + // We need to implement workspace client mocking to implement this test. +} + +func TestGetShortUserName(t *testing.T) { + tests := []struct { + name string + email string + expected string + }{ + { + name: "test alphanumeric characters", + email: "test.user@example.com", + expected: "test_user", + }, + { + name: "test unicode characters", + email: "tést.üser@example.com", + expected: "tést_üser", + }, + { + name: "test special characters", + email: "test$.user@example.com", + expected: "test__user", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := getShortUserName(tt.email) + if result != tt.expected { + t.Errorf("getShortUserName(%q) = %q; expected %q", tt.email, result, tt.expected) + } + }) + } +} diff --git a/bundle/config/mutator/process_environment_mode.go b/bundle/config/mutator/process_environment_mode.go index 3e1b7e819..65d8a6893 100644 --- a/bundle/config/mutator/process_environment_mode.go +++ b/bundle/config/mutator/process_environment_mode.go @@ -4,9 +4,11 @@ import ( "context" "fmt" "path" + "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + 
"github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" ) @@ -26,15 +28,17 @@ func (m *processEnvironmentMode) Name() string { // Mark all resources as being for 'development' purposes, i.e. // changing their their name, adding tags, and (in the future) // marking them as 'hidden' in the UI. -func processDevelopmentMode(b *bundle.Bundle) error { +func transformDevelopmentMode(b *bundle.Bundle) error { r := b.Config.Resources + prefix := "[dev " + b.Config.Workspace.CurrentUser.ShortName + "] " + for i := range r.Jobs { - r.Jobs[i].Name = "[dev] " + r.Jobs[i].Name + r.Jobs[i].Name = prefix + r.Jobs[i].Name if r.Jobs[i].Tags == nil { r.Jobs[i].Tags = make(map[string]string) } - r.Jobs[i].Tags["dev"] = "" + r.Jobs[i].Tags["dev"] = b.Config.Workspace.CurrentUser.DisplayName if r.Jobs[i].MaxConcurrentRuns == 0 { r.Jobs[i].MaxConcurrentRuns = developmentConcurrentRuns } @@ -50,13 +54,13 @@ func processDevelopmentMode(b *bundle.Bundle) error { } for i := range r.Pipelines { - r.Pipelines[i].Name = "[dev] " + r.Pipelines[i].Name + r.Pipelines[i].Name = prefix + r.Pipelines[i].Name r.Pipelines[i].Development = true // (pipelines don't yet support tags) } for i := range r.Models { - r.Models[i].Name = "[dev] " + r.Models[i].Name + r.Models[i].Name = prefix + r.Models[i].Name r.Models[i].Tags = append(r.Models[i].Tags, ml.ModelTag{Key: "dev", Value: ""}) } @@ -65,20 +69,111 @@ func processDevelopmentMode(b *bundle.Bundle) error { dir := path.Dir(filepath) base := path.Base(filepath) if dir == "." 
{ - r.Experiments[i].Name = "[dev] " + base + r.Experiments[i].Name = prefix + base } else { - r.Experiments[i].Name = dir + "/[dev] " + base + r.Experiments[i].Name = dir + "/" + prefix + base } - r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: ""}) + r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: b.Config.Workspace.CurrentUser.DisplayName}) } return nil } +func validateDevelopmentMode(b *bundle.Bundle) error { + if path := findIncorrectPath(b, config.Development); path != "" { + return fmt.Errorf("%s must start with '~/' or contain the current username when using 'mode: development'", path) + } + return nil +} + +func findIncorrectPath(b *bundle.Bundle, mode config.Mode) string { + username := b.Config.Workspace.CurrentUser.UserName + containsExpected := true + if mode == config.Production { + containsExpected = false + } + + if strings.Contains(b.Config.Workspace.RootPath, username) != containsExpected && b.Config.Workspace.RootPath != "" { + return "root_path" + } + if strings.Contains(b.Config.Workspace.StatePath, username) != containsExpected { + return "state_path" + } + if strings.Contains(b.Config.Workspace.FilesPath, username) != containsExpected { + return "files_path" + } + if strings.Contains(b.Config.Workspace.ArtifactsPath, username) != containsExpected { + return "artifacts_path" + } + return "" +} + +func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { + r := b.Config.Resources + for i := range r.Pipelines { + if r.Pipelines[i].Development { + return fmt.Errorf("environment with 'mode: production' cannot specify a pipeline with 'development: true'") + } + } + + if !isPrincipalUsed { + if path := findIncorrectPath(b, config.Production); path != "" { + message := "%s must not contain the current username when using 'mode: production'" + if path == "root_path" { + return fmt.Errorf(message+"\n tip: set workspace.root_path to 
a shared path such as /Shared/.bundle/${bundle.name}/${bundle.environment}", path) + } else { + return fmt.Errorf(message, path) + } + } + + if !isRunAsSet(r) { + return fmt.Errorf("'run_as' must be set for all jobs when using 'mode: production'") + } + } + return nil +} + +// Determines whether a service principal identity is used to run the CLI. +func isServicePrincipalUsed(ctx context.Context, b *bundle.Bundle) (bool, error) { + ws := b.WorkspaceClient() + + // Check if a principal with the current user's ID exists. + // We need to use the ListAll method since Get is only usable by admins. + matches, err := ws.ServicePrincipals.ListAll(ctx, iam.ListServicePrincipalsRequest{ + Filter: "id eq " + b.Config.Workspace.CurrentUser.Id, + }) + if err != nil { + return false, err + } + return len(matches) > 0, nil +} + +// Determines whether run_as is explicitly set for all resources. +// We do this in a best-effort fashion rather than check the top-level +// 'run_as' field because the latter is not required to be set. 
+func isRunAsSet(r config.Resources) bool { + for i := range r.Jobs { + if r.Jobs[i].RunAs == nil { + return false + } + } + return true +} + func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) error { switch b.Config.Bundle.Mode { case config.Development: - return processDevelopmentMode(b) + err := validateDevelopmentMode(b) + if err != nil { + return err + } + return transformDevelopmentMode(b) + case config.Production: + isPrincipal, err := isServicePrincipalUsed(ctx, b) + if err != nil { + return err + } + return validateProductionMode(ctx, b, isPrincipal) case "": // No action default: diff --git a/bundle/config/mutator/process_environment_mode_test.go b/bundle/config/mutator/process_environment_mode_test.go index 5342de212..6f53abd89 100644 --- a/bundle/config/mutator/process_environment_mode_test.go +++ b/bundle/config/mutator/process_environment_mode_test.go @@ -1,13 +1,15 @@ -package mutator_test +package mutator import ( "context" + "reflect" + "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/iam" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/pipelines" @@ -15,11 +17,23 @@ import ( "github.com/stretchr/testify/require" ) -func TestProcessEnvironmentModeApplyDebug(t *testing.T) { - bundle := &bundle.Bundle{ +func mockBundle(mode config.Mode) *bundle.Bundle { + return &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ - Mode: config.Development, + Mode: mode, + }, + Workspace: config.Workspace{ + CurrentUser: &config.User{ + ShortName: "lennart", + User: &iam.User{ + UserName: "lennart@company.com", + Id: "1", + }, + }, + StatePath: "/Users/lennart@company.com/.bundle/x/y/state", + ArtifactsPath: 
"/Users/lennart@company.com/.bundle/x/y/artifacts", + FilesPath: "/Users/lennart@company.com/.bundle/x/y/files", }, Resources: config.Resources{ Jobs: map[string]*resources.Job{ @@ -38,40 +52,124 @@ func TestProcessEnvironmentModeApplyDebug(t *testing.T) { }, }, } +} - m := mutator.ProcessEnvironmentMode() +func TestProcessEnvironmentModeDevelopment(t *testing.T) { + bundle := mockBundle(config.Development) + + m := ProcessEnvironmentMode() err := m.Apply(context.Background(), bundle) require.NoError(t, err) - assert.Equal(t, "[dev] job1", bundle.Config.Resources.Jobs["job1"].Name) - assert.Equal(t, "[dev] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) - assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) - assert.Equal(t, "[dev] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) - assert.Equal(t, "[dev] model1", bundle.Config.Resources.Models["model1"].Name) + assert.Equal(t, "[dev lennart] job1", bundle.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "[dev lennart] pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) + assert.Equal(t, "/Users/lennart.kats@databricks.com/[dev lennart] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) + assert.Equal(t, "[dev lennart] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) + assert.Equal(t, "[dev lennart] model1", bundle.Config.Resources.Models["model1"].Name) assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } -func TestProcessEnvironmentModeApplyDefault(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Bundle: config.Bundle{ - Mode: "", - }, - Resources: config.Resources{ - Jobs: map[string]*resources.Job{ - "job1": {JobSettings: &jobs.JobSettings{Name: "job1"}}, - }, - Pipelines: 
map[string]*resources.Pipeline{ - "pipeline1": {PipelineSpec: &pipelines.PipelineSpec{Name: "pipeline1"}}, - }, - }, - }, - } +func TestProcessEnvironmentModeDefault(t *testing.T) { + bundle := mockBundle("") - m := mutator.ProcessEnvironmentMode() + m := ProcessEnvironmentMode() err := m.Apply(context.Background(), bundle) require.NoError(t, err) assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } + +func TestProcessEnvironmentModeProduction(t *testing.T) { + bundle := mockBundle(config.Production) + + err := validateProductionMode(context.Background(), bundle, false) + require.ErrorContains(t, err, "state_path") + + bundle.Config.Workspace.StatePath = "/Shared/.bundle/x/y/state" + bundle.Config.Workspace.ArtifactsPath = "/Shared/.bundle/x/y/artifacts" + bundle.Config.Workspace.FilesPath = "/Shared/.bundle/x/y/files" + + err = validateProductionMode(context.Background(), bundle, false) + require.ErrorContains(t, err, "production") + + permissions := []resources.Permission{ + { + Level: "CAN_MANAGE", + UserName: "user@company.com", + }, + } + bundle.Config.Resources.Jobs["job1"].Permissions = permissions + bundle.Config.Resources.Jobs["job1"].RunAs = &jobs.JobRunAs{UserName: "user@company.com"} + bundle.Config.Resources.Pipelines["pipeline1"].Permissions = permissions + bundle.Config.Resources.Experiments["experiment1"].Permissions = permissions + bundle.Config.Resources.Experiments["experiment2"].Permissions = permissions + bundle.Config.Resources.Models["model1"].Permissions = permissions + + err = validateProductionMode(context.Background(), bundle, false) + require.NoError(t, err) + + assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) + assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) + assert.False(t, 
bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) +} + +func TestProcessEnvironmentModeProductionOkForPrincipal(t *testing.T) { + bundle := mockBundle(config.Production) + + // Our environment has all kinds of problems when not using service principals ... + err := validateProductionMode(context.Background(), bundle, false) + require.Error(t, err) + + // ... but we're much less strict when a principal is used + err = validateProductionMode(context.Background(), bundle, true) + require.NoError(t, err) +} + +// Make sure that we have test coverage for all resource types +func TestAllResourcesMocked(t *testing.T) { + bundle := mockBundle(config.Development) + resources := reflect.ValueOf(bundle.Config.Resources) + + for i := 0; i < resources.NumField(); i++ { + field := resources.Field(i) + if field.Kind() == reflect.Map { + assert.True( + t, + !field.IsNil() && field.Len() > 0, + "process_environment_mode should support '%s' (please add it to process_environment_mode.go and extend the test suite)", + resources.Type().Field(i).Name, + ) + } + } +} + +// Make sure that we at least rename all resources +func TestAllResourcesRenamed(t *testing.T) { + bundle := mockBundle(config.Development) + resources := reflect.ValueOf(bundle.Config.Resources) + + m := ProcessEnvironmentMode() + err := m.Apply(context.Background(), bundle) + require.NoError(t, err) + + for i := 0; i < resources.NumField(); i++ { + field := resources.Field(i) + + if field.Kind() == reflect.Map { + for _, key := range field.MapKeys() { + resource := field.MapIndex(key) + nameField := resource.Elem().FieldByName("Name") + if nameField.IsValid() && nameField.Kind() == reflect.String { + assert.True( + t, + strings.Contains(nameField.String(), "dev"), + "process_environment_mode should rename '%s' in '%s'", + key, + resources.Type().Field(i).Name, + ) + } + } + } + } +} diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index ee09bb8b4..f278ea179 100644 --- 
a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -42,7 +42,7 @@ type Workspace struct { // CurrentUser holds the current user. // This is set after configuration initialization. - CurrentUser *iam.User `json:"current_user,omitempty" bundle:"readonly"` + CurrentUser *User `json:"current_user,omitempty" bundle:"readonly"` // Remote workspace base path for deployment state, for artifacts, as synchronization target. // This defaults to "~/.bundle/${bundle.name}/${bundle.environment}" where "~" expands to @@ -62,6 +62,13 @@ type Workspace struct { StatePath string `json:"state_path,omitempty"` } +type User struct { + // A short name for the user, based on the user's UserName. + ShortName string `json:"short_name,omitempty" bundle:"readonly"` + + *iam.User +} + func (w *Workspace) Client() (*databricks.WorkspaceClient, error) { cfg := databricks.Config{ // Generic diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index 77c64e529..84d79dc81 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -15,9 +15,10 @@ func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { } opts := sync.SyncOptions{ - LocalPath: b.Config.Path, - RemotePath: b.Config.Workspace.FilesPath, - Full: false, + LocalPath: b.Config.Path, + RemotePath: b.Config.Workspace.FilesPath, + Full: false, + CurrentUser: b.Config.Workspace.CurrentUser.User, SnapshotBasePath: cacheDir, WorkspaceClient: b.WorkspaceClient(), diff --git a/bundle/tests/job_and_pipeline/databricks.yml b/bundle/tests/job_and_pipeline/databricks.yml index d6942e8a7..e29fa0349 100644 --- a/bundle/tests/job_and_pipeline/databricks.yml +++ b/bundle/tests/job_and_pipeline/databricks.yml @@ -23,6 +23,7 @@ environments: development: false production: + mode: production resources: pipelines: nyc_taxi_pipeline: diff --git a/internal/sync_test.go b/internal/sync_test.go index 09418a855..66b5fd3ca 100644 --- a/internal/sync_test.go +++ b/internal/sync_test.go @@ -509,12 
+509,12 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist(t *testing.T) { // Hypothetical repo path doesn't exist. nonExistingRepoPath := fmt.Sprintf("/Repos/%s/%s", me.UserName, RandomName("doesnt-exist-")) - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nonExistingRepoPath, nil) assert.ErrorContains(t, err, " does not exist; please create it first") // Paths nested under a hypothetical repo path should yield the same error. nestedPath := path.Join(nonExistingRepoPath, "nested/directory") - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath, nil) assert.ErrorContains(t, err, " does not exist; please create it first") } @@ -526,12 +526,12 @@ func TestAccSyncEnsureRemotePathIsUsableIfRepoExists(t *testing.T) { _, remoteRepoPath := setupRepo(t, wsc, ctx) // Repo itself is usable. - err := sync.EnsureRemotePathIsUsable(ctx, wsc, remoteRepoPath) + err := sync.EnsureRemotePathIsUsable(ctx, wsc, remoteRepoPath, nil) assert.NoError(t, err) // Path nested under repo path is usable. nestedPath := path.Join(remoteRepoPath, "nested/directory") - err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, nestedPath, nil) assert.NoError(t, err) // Verify that the directory has been created. @@ -549,7 +549,7 @@ func TestAccSyncEnsureRemotePathIsUsableInWorkspace(t *testing.T) { require.NoError(t, err) remotePath := fmt.Sprintf("/Users/%s/%s", me.UserName, RandomName("ensure-path-exists-test-")) - err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath) + err = sync.EnsureRemotePathIsUsable(ctx, wsc, remotePath, me) assert.NoError(t, err) // Clean up directory after test. 
diff --git a/libs/sync/path.go b/libs/sync/path.go index a04c28d30..97a908965 100644 --- a/libs/sync/path.go +++ b/libs/sync/path.go @@ -24,10 +24,16 @@ func repoPathForPath(me *iam.User, remotePath string) string { // EnsureRemotePathIsUsable checks if the specified path is nested under // expected base paths and if it is a directory or repository. -func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClient, remotePath string) error { - me, err := wsc.CurrentUser.Me(ctx) - if err != nil { - return err +func EnsureRemotePathIsUsable(ctx context.Context, wsc *databricks.WorkspaceClient, remotePath string, me *iam.User) error { + var err error + + // TODO: we should cache CurrentUser.Me at the SDK level + // for now we let clients pass in any existing user they might already have + if me == nil { + me, err = wsc.CurrentUser.Me(ctx) + if err != nil { + return err + } } // Ensure that the remote path exists. diff --git a/libs/sync/sync.go b/libs/sync/sync.go index 5c4c9d8f6..a299214d0 100644 --- a/libs/sync/sync.go +++ b/libs/sync/sync.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/iam" ) type SyncOptions struct { @@ -23,6 +24,8 @@ type SyncOptions struct { WorkspaceClient *databricks.WorkspaceClient + CurrentUser *iam.User + Host string } @@ -50,7 +53,7 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { } // Verify that the remote path we're about to synchronize to is valid and allowed. 
- err = EnsureRemotePathIsUsable(ctx, opts.WorkspaceClient, opts.RemotePath) + err = EnsureRemotePathIsUsable(ctx, opts.WorkspaceClient, opts.RemotePath, opts.CurrentUser) if err != nil { return nil, err } From 433f401c83bb4bbde9e5a378b9e750df088ac73a Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Sun, 30 Jul 2023 14:44:33 +0200 Subject: [PATCH 033/139] Add validation for Git settings in bundles (#578) ## Changes This checks whether the Git settings are consistent with the actual Git state of a source directory. (This PR adds to https://github.com/databricks/cli/pull/577.) Previously, we would silently let users configure their Git branch to e.g. `main` and deploy with that metadata even if they were actually on a different branch. With these changes, the following config would result in an error when deployed from any other branch than `main`: ``` bundle: name: example workspace: git: branch: main environments: ... ``` > not on the right Git branch: > expected according to configuration: main > actual: my-feature-branch It's not very useful to set the same branch for all environments, though. For development, it's better to just let the CLI auto-detect the right branch. Therefore, it's now possible to set the branch just for a single environment: ``` bundle: name: example 2 environments: development: default: true production: # production can only be deployed from the 'main' branch git: branch: main ``` Adding to that, the `mode: production` option actually checks that users explicitly set the Git branch as seen above. Setting that branch helps avoid mistakes, where someone accidentally deploys to production from the wrong branch. (I could see us offering an escape hatch for that in the future.) # Testing Manual testing to validate the experience and error messages. Automated unit tests. 
--------- Co-authored-by: Fabian Jakobs --- bundle/config/bundle.go | 3 + bundle/config/environment.go | 2 + bundle/config/git.go | 6 ++ bundle/config/mutator/load_git_details.go | 2 + .../mutator/process_environment_mode.go | 5 ++ .../mutator/process_environment_mode_test.go | 15 +++++ bundle/config/mutator/validate_git_details.go | 29 +++++++++ .../mutator/validate_git_details_test.go | 65 +++++++++++++++++++ bundle/config/root.go | 12 ++++ bundle/phases/deploy.go | 2 + bundle/tests/autoload_git/databricks.yml | 11 +++- bundle/tests/autoload_git_test.go | 15 +++-- cmd/bundle/deploy.go | 10 +-- cmd/bundle/destroy.go | 4 +- 14 files changed, 168 insertions(+), 13 deletions(-) create mode 100644 bundle/config/mutator/validate_git_details.go create mode 100644 bundle/config/mutator/validate_git_details_test.go diff --git a/bundle/config/bundle.go b/bundle/config/bundle.go index cf3864775..f3401477f 100644 --- a/bundle/config/bundle.go +++ b/bundle/config/bundle.go @@ -25,6 +25,9 @@ type Bundle struct { // Lock configures locking behavior on deployment. Lock Lock `json:"lock" bundle:"readonly"` + // Force-override Git branch validation. + Force bool `json:"force" bundle:"readonly"` + // Contains Git information like current commit, current branch and // origin url. 
Automatically loaded by reading .git directory if not specified Git Git `json:"git,omitempty"` diff --git a/bundle/config/environment.go b/bundle/config/environment.go index c1f4f4ad7..7152f791f 100644 --- a/bundle/config/environment.go +++ b/bundle/config/environment.go @@ -29,6 +29,8 @@ type Environment struct { // Does not permit defining new variables or redefining existing ones // in the scope of an environment Variables map[string]string `json:"variables,omitempty"` + + Git Git `json:"git,omitempty"` } const ( diff --git a/bundle/config/git.go b/bundle/config/git.go index 7ada8dfbc..760134a86 100644 --- a/bundle/config/git.go +++ b/bundle/config/git.go @@ -4,4 +4,10 @@ type Git struct { Branch string `json:"branch,omitempty"` OriginURL string `json:"origin_url,omitempty"` Commit string `json:"commit,omitempty" bundle:"readonly"` + + // Inferred is set to true if the Git details were inferred and weren't set explicitly + Inferred bool `json:"-" bundle:"readonly"` + + // The actual branch according to Git (may be different from the configured branch) + ActualBranch string `json:"-" bundle:"readonly"` } diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index 121924c62..f22aafe01 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -31,6 +31,8 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { log.Warnf(ctx, "failed to load current branch: %s", err) } else { b.Config.Bundle.Git.Branch = branch + b.Config.Bundle.Git.ActualBranch = branch + b.Config.Bundle.Git.Inferred = true } } // load commit hash if undefined diff --git a/bundle/config/mutator/process_environment_mode.go b/bundle/config/mutator/process_environment_mode.go index 65d8a6893..d20302347 100644 --- a/bundle/config/mutator/process_environment_mode.go +++ b/bundle/config/mutator/process_environment_mode.go @@ -109,6 +109,11 @@ func findIncorrectPath(b *bundle.Bundle, mode 
config.Mode) string { } func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { + if b.Config.Bundle.Git.Inferred { + env := b.Config.Bundle.Environment + return fmt.Errorf("environment with 'mode: production' must specify an explicit 'environments.%s.git' configuration", env) + } + r := b.Config.Resources for i := range r.Pipelines { if r.Pipelines[i].Development { diff --git a/bundle/config/mutator/process_environment_mode_test.go b/bundle/config/mutator/process_environment_mode_test.go index 6f53abd89..36e0396e2 100644 --- a/bundle/config/mutator/process_environment_mode_test.go +++ b/bundle/config/mutator/process_environment_mode_test.go @@ -22,6 +22,10 @@ func mockBundle(mode config.Mode) *bundle.Bundle { Config: config.Root{ Bundle: config.Bundle{ Mode: mode, + Git: config.Git{ + OriginURL: "http://origin", + Branch: "main", + }, }, Workspace: config.Workspace{ CurrentUser: &config.User{ @@ -114,6 +118,17 @@ func TestProcessEnvironmentModeProduction(t *testing.T) { assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } +func TestProcessEnvironmentModeProductionGit(t *testing.T) { + bundle := mockBundle(config.Production) + + // Pretend the user didn't set Git configuration explicitly + bundle.Config.Bundle.Git.Inferred = true + + err := validateProductionMode(context.Background(), bundle, false) + require.ErrorContains(t, err, "git") + bundle.Config.Bundle.Git.Inferred = false +} + func TestProcessEnvironmentModeProductionOkForPrincipal(t *testing.T) { bundle := mockBundle(config.Production) diff --git a/bundle/config/mutator/validate_git_details.go b/bundle/config/mutator/validate_git_details.go new file mode 100644 index 000000000..116498bfc --- /dev/null +++ b/bundle/config/mutator/validate_git_details.go @@ -0,0 +1,29 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" +) + +type validateGitDetails struct{} + +func ValidateGitDetails() 
*validateGitDetails { + return &validateGitDetails{} +} + +func (m *validateGitDetails) Name() string { + return "ValidateGitDetails" +} + +func (m *validateGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { + if b.Config.Bundle.Git.Branch == "" || b.Config.Bundle.Git.ActualBranch == "" { + return nil + } + + if b.Config.Bundle.Git.Branch != b.Config.Bundle.Git.ActualBranch && !b.Config.Bundle.Force { + return fmt.Errorf("not on the right Git branch:\n expected according to configuration: %s\n actual: %s\nuse --force to override", b.Config.Bundle.Git.Branch, b.Config.Bundle.Git.ActualBranch) + } + return nil +} diff --git a/bundle/config/mutator/validate_git_details_test.go b/bundle/config/mutator/validate_git_details_test.go new file mode 100644 index 000000000..252964eeb --- /dev/null +++ b/bundle/config/mutator/validate_git_details_test.go @@ -0,0 +1,65 @@ +package mutator + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/assert" +) + +func TestValidateGitDetailsMatchingBranches(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "main", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + assert.NoError(t, err) +} + +func TestValidateGitDetailsNonMatchingBranches(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "feature", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + expectedError := "not on the right Git branch:\n expected according to configuration: main\n actual: feature\nuse --force to override" + assert.EqualError(t, err, expectedError) +} + +func TestValidateGitDetailsNotUsingGit(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Bundle: 
config.Bundle{ + Git: config.Git{ + Branch: "main", + ActualBranch: "", + }, + }, + }, + } + + m := ValidateGitDetails() + err := m.Apply(context.Background(), bundle) + + assert.NoError(t, err) +} diff --git a/bundle/config/root.go b/bundle/config/root.go index f5a4f00d3..52f887378 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -225,5 +225,17 @@ func (r *Root) MergeEnvironment(env *Environment) error { r.Bundle.ComputeID = env.ComputeID } + git := &r.Bundle.Git + if env.Git.Branch != "" { + git.Branch = env.Git.Branch + git.Inferred = false + } + if env.Git.Commit != "" { + git.Commit = env.Git.Commit + } + if env.Git.OriginURL != "" { + git.OriginURL = env.Git.OriginURL + } + return nil } diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 8b53273c7..011bb4b2b 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -3,6 +3,7 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" + "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" @@ -15,6 +16,7 @@ func Deploy() bundle.Mutator { lock.Acquire(), bundle.Defer( bundle.Seq( + mutator.ValidateGitDetails(), files.Upload(), libraries.MatchWithArtifacts(), artifacts.CleanUp(), diff --git a/bundle/tests/autoload_git/databricks.yml b/bundle/tests/autoload_git/databricks.yml index d0e1de60f..ba4785aed 100644 --- a/bundle/tests/autoload_git/databricks.yml +++ b/bundle/tests/autoload_git/databricks.yml @@ -1,4 +1,11 @@ bundle: name: autoload git config test - git: - branch: foo + +environments: + development: + default: true + + production: + # production can only be deployed from the 'main' branch + git: + branch: main diff --git a/bundle/tests/autoload_git_test.go b/bundle/tests/autoload_git_test.go index 87c7180e7..a1075198f 100644 --- a/bundle/tests/autoload_git_test.go +++ 
b/bundle/tests/autoload_git_test.go @@ -6,10 +6,15 @@ import ( "github.com/stretchr/testify/assert" ) -func TestGitConfig(t *testing.T) { +func TestAutoLoad(t *testing.T) { b := load(t, "./autoload_git") - assert.Equal(t, "foo", b.Config.Bundle.Git.Branch) - sshUrl := "git@github.com:databricks/cli.git" - httpsUrl := "https://github.com/databricks/cli" - assert.Contains(t, []string{sshUrl, httpsUrl}, b.Config.Bundle.Git.OriginURL) + assert.True(t, b.Config.Bundle.Git.Inferred) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestManuallySetBranch(t *testing.T) { + b := loadEnvironment(t, "./autoload_git", "production") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "main", b.Config.Bundle.Git.Branch) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") } diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index a39f19969..807bb982d 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -13,16 +13,18 @@ func newDeployCommand() *cobra.Command { PreRunE: ConfigureBundleWithVariables, } - var forceDeploy bool + var force bool + var forceLock bool var computeID string - cmd.Flags().BoolVar(&forceDeploy, "force", false, "Force acquisition of deployment lock.") + cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation.") + cmd.Flags().BoolVar(&forceLock, "force-deploy", false, "Force acquisition of deployment lock.") cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") cmd.RunE = func(cmd *cobra.Command, args []string) error { b := bundle.Get(cmd.Context()) - // If `--force` is specified, force acquisition of the deployment lock. 
- b.Config.Bundle.Lock.Force = forceDeploy + b.Config.Bundle.Force = force + b.Config.Bundle.Lock.Force = forceLock b.Config.Bundle.ComputeID = computeID return bundle.Apply(cmd.Context(), b, bundle.Seq( diff --git a/cmd/bundle/destroy.go b/cmd/bundle/destroy.go index 82d821441..22d998abe 100644 --- a/cmd/bundle/destroy.go +++ b/cmd/bundle/destroy.go @@ -23,13 +23,13 @@ func newDestroyCommand() *cobra.Command { var autoApprove bool var forceDestroy bool cmd.Flags().BoolVar(&autoApprove, "auto-approve", false, "Skip interactive approvals for deleting resources and files") - cmd.Flags().BoolVar(&forceDestroy, "force", false, "Force acquisition of deployment lock.") + cmd.Flags().BoolVar(&forceDestroy, "force-lock", false, "Force acquisition of deployment lock.") cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() b := bundle.Get(ctx) - // If `--force` is specified, force acquisition of the deployment lock. + // If `--force-lock` is specified, force acquisition of the deployment lock. b.Config.Bundle.Lock.Force = forceDestroy // If `--auto-approve`` is specified, we skip confirmation checks From 5a6177127f6196cfd0392d169003b1233f66cf00 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 31 Jul 2023 12:38:42 +0200 Subject: [PATCH 034/139] Fix failing fs mkdir test on azure (#627) Regex check was missing a "." character and adding it in fixes the test. The test now passes on all three cloud providers --- internal/fs_mkdir_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/fs_mkdir_test.go b/internal/fs_mkdir_test.go index 32d1d9c8b..83417c136 100644 --- a/internal/fs_mkdir_test.go +++ b/internal/fs_mkdir_test.go @@ -112,6 +112,6 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) // Different cloud providers return different errors. 
- regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) + regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file\.$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) assert.Regexp(t, regex, err.Error()) } From bb415ce6bb173b8b0d4018271df371264d3fbcd5 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Tue, 1 Aug 2023 14:33:19 +0200 Subject: [PATCH 035/139] Bump OpenAPI specification & Go SDK Version (#624) ## Changes Bump the OpenAPI specification and Go SDK version to the latest version. ## Tests --- .codegen/_openapi_sha | 2 +- .gitattributes | 1 + cmd/account/cmd.go | 2 + .../metastore-assignments.go | 19 +- cmd/account/metastores/metastores.go | 16 +- cmd/account/network-policy/network-policy.go | 243 ++++++++++++++++++ cmd/workspace/catalogs/catalogs.go | 1 + cmd/workspace/connections/connections.go | 3 - .../external-locations/external-locations.go | 4 + cmd/workspace/providers/providers.go | 8 +- .../recipient-activation.go | 15 +- cmd/workspace/recipients/recipients.go | 22 +- cmd/workspace/shares/shares.go | 10 +- go.mod | 2 +- go.sum | 4 +- 15 files changed, 310 insertions(+), 42 deletions(-) create mode 100755 cmd/account/network-policy/network-policy.go diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index 1079283dd..c9e7a8be7 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -0a1949ba96f71680dad30e06973eaae85b1307bb \ No newline at end of file +7b57ba3a53f4de3d049b6a24391fe5474212daf8 \ No newline at end of file diff --git a/.gitattributes b/.gitattributes index 714d6c53a..f9ea04180 100755 --- a/.gitattributes +++ b/.gitattributes @@ -10,6 +10,7 @@ cmd/account/ip-access-lists/ip-access-lists.go linguist-generated=true cmd/account/log-delivery/log-delivery.go linguist-generated=true cmd/account/metastore-assignments/metastore-assignments.go 
linguist-generated=true cmd/account/metastores/metastores.go linguist-generated=true +cmd/account/network-policy/network-policy.go linguist-generated=true cmd/account/networks/networks.go linguist-generated=true cmd/account/o-auth-enrollment/o-auth-enrollment.go linguist-generated=true cmd/account/private-access/private-access.go linguist-generated=true diff --git a/cmd/account/cmd.go b/cmd/account/cmd.go index 294801a68..2b06171db 100644 --- a/cmd/account/cmd.go +++ b/cmd/account/cmd.go @@ -16,6 +16,7 @@ import ( log_delivery "github.com/databricks/cli/cmd/account/log-delivery" account_metastore_assignments "github.com/databricks/cli/cmd/account/metastore-assignments" account_metastores "github.com/databricks/cli/cmd/account/metastores" + account_network_policy "github.com/databricks/cli/cmd/account/network-policy" networks "github.com/databricks/cli/cmd/account/networks" o_auth_enrollment "github.com/databricks/cli/cmd/account/o-auth-enrollment" private_access "github.com/databricks/cli/cmd/account/private-access" @@ -48,6 +49,7 @@ func New() *cobra.Command { cmd.AddCommand(log_delivery.New()) cmd.AddCommand(account_metastore_assignments.New()) cmd.AddCommand(account_metastores.New()) + cmd.AddCommand(account_network_policy.New()) cmd.AddCommand(networks.New()) cmd.AddCommand(o_auth_enrollment.New()) cmd.AddCommand(private_access.New()) diff --git a/cmd/account/metastore-assignments/metastore-assignments.go b/cmd/account/metastore-assignments/metastore-assignments.go index 8b571f1e5..24c4eb699 100755 --- a/cmd/account/metastore-assignments/metastore-assignments.go +++ b/cmd/account/metastore-assignments/metastore-assignments.go @@ -59,8 +59,7 @@ func newCreate() *cobra.Command { cmd.Short = `Assigns a workspace to a metastore.` cmd.Long = `Assigns a workspace to a metastore. 
- Creates an assignment to a metastore for a workspace Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + Creates an assignment to a metastore for a workspace` cmd.Annotations = make(map[string]string) @@ -86,11 +85,11 @@ func newCreate() *cobra.Command { } createReq.MetastoreId = args[1] - response, err := a.MetastoreAssignments.Create(ctx, createReq) + err = a.MetastoreAssignments.Create(ctx, createReq) if err != nil { return err } - return cmdio.Render(ctx, response) + return nil } // Disable completions since they are not applicable. @@ -132,8 +131,7 @@ func newDelete() *cobra.Command { cmd.Long = `Delete a metastore assignment. Deletes a metastore assignment to a workspace, leaving the workspace with no - metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API.` + metastore.` cmd.Annotations = make(map[string]string) @@ -201,8 +199,7 @@ func newGet() *cobra.Command { Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment will not be found and a - 404 returned. Please add a header X-Databricks-Account-Console-API-Version: - 2.0 to access this API.` + 404 returned.` cmd.Annotations = make(map[string]string) @@ -267,8 +264,7 @@ func newList() *cobra.Command { cmd.Long = `Get all workspaces assigned to a metastore. Gets a list of all Databricks workspace IDs that have been assigned to given - metastore. Please add a header X-Databricks-Account-Console-API-Version: 2.0 - to access this API` + metastore.` cmd.Annotations = make(map[string]string) @@ -334,8 +330,7 @@ func newUpdate() *cobra.Command { cmd.Long = `Updates a metastore assignment to a workspaces. Updates an assignment to a metastore for a workspace. Currently, only the - default catalog may be updated. 
Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + default catalog may be updated.` cmd.Annotations = make(map[string]string) diff --git a/cmd/account/metastores/metastores.go b/cmd/account/metastores/metastores.go index 48c8a6b03..185f3642b 100755 --- a/cmd/account/metastores/metastores.go +++ b/cmd/account/metastores/metastores.go @@ -58,8 +58,7 @@ func newCreate() *cobra.Command { cmd.Short = `Create metastore.` cmd.Long = `Create metastore. - Creates a Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + Creates a Unity Catalog metastore.` cmd.Annotations = make(map[string]string) @@ -131,8 +130,7 @@ func newDelete() *cobra.Command { cmd.Short = `Delete a metastore.` cmd.Long = `Delete a metastore. - Deletes a Unity Catalog metastore for an account, both specified by ID. Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + Deletes a Unity Catalog metastore for an account, both specified by ID.` cmd.Annotations = make(map[string]string) @@ -193,8 +191,7 @@ func newGet() *cobra.Command { cmd.Short = `Get a metastore.` cmd.Long = `Get a metastore. - Gets a Unity Catalog metastore from an account, both specified by ID. Please - add a header X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + Gets a Unity Catalog metastore from an account, both specified by ID.` cmd.Annotations = make(map[string]string) @@ -250,9 +247,7 @@ func newList() *cobra.Command { cmd.Short = `Get all metastores associated with an account.` cmd.Long = `Get all metastores associated with an account. - Gets all Unity Catalog metastores associated with an account specified by ID. 
- Please add a header X-Databricks-Account-Console-API-Version: 2.0 to access - this API.` + Gets all Unity Catalog metastores associated with an account specified by ID.` cmd.Annotations = make(map[string]string) @@ -309,8 +304,7 @@ func newUpdate() *cobra.Command { cmd.Short = `Update a metastore.` cmd.Long = `Update a metastore. - Updates an existing Unity Catalog metastore. Please add a header - X-Databricks-Account-Console-API-Version: 2.0 to access this API.` + Updates an existing Unity Catalog metastore.` cmd.Annotations = make(map[string]string) diff --git a/cmd/account/network-policy/network-policy.go b/cmd/account/network-policy/network-policy.go new file mode 100755 index 000000000..60db933ab --- /dev/null +++ b/cmd/account/network-policy/network-policy.go @@ -0,0 +1,243 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package network_policy + +import ( + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/settings" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "network-policy", + Short: `Network policy is a set of rules that defines what can be accessed from your Databricks network.`, + Long: `Network policy is a set of rules that defines what can be accessed from your + Databricks network. E.g.: You can choose to block your SQL UDF to access + internet from your Databricks serverless clusters. + + There is only one instance of this setting per account. Since this setting has + a default value, this setting is present on all accounts even though it's + never set on a given account. 
Deletion reverts the value of the setting back + to the default value.`, + GroupID: "settings", + Annotations: map[string]string{ + "package": "settings", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start delete-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.DeleteAccountNetworkPolicyRequest, +) + +func newDeleteAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAccountNetworkPolicyReq settings.DeleteAccountNetworkPolicyRequest + + // TODO: short flags + + cmd.Use = "delete-account-network-policy ETAG" + cmd.Short = `Delete Account Network Policy.` + cmd.Long = `Delete Account Network Policy. + + Reverts back all the account network policies back to default.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + deleteAccountNetworkPolicyReq.Etag = args[0] + + response, err := a.NetworkPolicy.DeleteAccountNetworkPolicy(ctx, deleteAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteAccountNetworkPolicyOverrides { + fn(cmd, &deleteAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAccountNetworkPolicy()) + }) +} + +// start read-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var readAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.ReadAccountNetworkPolicyRequest, +) + +func newReadAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var readAccountNetworkPolicyReq settings.ReadAccountNetworkPolicyRequest + + // TODO: short flags + + cmd.Use = "read-account-network-policy ETAG" + cmd.Short = `Get Account Network Policy.` + cmd.Long = `Get Account Network Policy. + + Gets the value of Account level Network Policy.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + readAccountNetworkPolicyReq.Etag = args[0] + + response, err := a.NetworkPolicy.ReadAccountNetworkPolicy(ctx, readAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range readAccountNetworkPolicyOverrides { + fn(cmd, &readAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newReadAccountNetworkPolicy()) + }) +} + +// start update-account-network-policy command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateAccountNetworkPolicyOverrides []func( + *cobra.Command, + *settings.UpdateAccountNetworkPolicyRequest, +) + +func newUpdateAccountNetworkPolicy() *cobra.Command { + cmd := &cobra.Command{} + + var updateAccountNetworkPolicyReq settings.UpdateAccountNetworkPolicyRequest + var updateAccountNetworkPolicyJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateAccountNetworkPolicyJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().BoolVar(&updateAccountNetworkPolicyReq.AllowMissing, "allow-missing", updateAccountNetworkPolicyReq.AllowMissing, `This should always be set to true for Settings RPCs.`) + // TODO: complex arg: setting + + cmd.Use = "update-account-network-policy" + cmd.Short = `Update Account Network Policy.` + cmd.Long = `Update Account Network Policy. 
+ + Updates the policy content of Account level Network Policy.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustAccountClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + a := root.AccountClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateAccountNetworkPolicyJson.Unmarshal(&updateAccountNetworkPolicyReq) + if err != nil { + return err + } + } else { + } + + response, err := a.NetworkPolicy.UpdateAccountNetworkPolicy(ctx, updateAccountNetworkPolicyReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateAccountNetworkPolicyOverrides { + fn(cmd, &updateAccountNetworkPolicyReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdateAccountNetworkPolicy()) + }) +} + +// end service AccountNetworkPolicy diff --git a/cmd/workspace/catalogs/catalogs.go b/cmd/workspace/catalogs/catalogs.go index 2c520e4da..c17f6c22e 100755 --- a/cmd/workspace/catalogs/catalogs.go +++ b/cmd/workspace/catalogs/catalogs.go @@ -59,6 +59,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + cmd.Flags().StringVar(&createReq.ConnectionName, "connection-name", createReq.ConnectionName, `The name of the connection to an external data source.`) // TODO: map via StringToStringVar: properties cmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) cmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) diff --git a/cmd/workspace/connections/connections.go b/cmd/workspace/connections/connections.go index 966c5666c..7783b9eb7 100755 --- a/cmd/workspace/connections/connections.go +++ b/cmd/workspace/connections/connections.go @@ -35,9 +35,6 @@ func New() *cobra.Command { Annotations: map[string]string{ "package": "catalog", }, - - // This service is being previewed; hide from help output. - Hidden: true, } // Apply optional overrides to this command. 
diff --git a/cmd/workspace/external-locations/external-locations.go b/cmd/workspace/external-locations/external-locations.go index db6153df0..7f67b26b7 100755 --- a/cmd/workspace/external-locations/external-locations.go +++ b/cmd/workspace/external-locations/external-locations.go @@ -63,7 +63,9 @@ func newCreate() *cobra.Command { // TODO: short flags cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&createReq.AccessPoint, "access-point", createReq.AccessPoint, `The AWS access point to use when accesing s3 for this external location.`) cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) + // TODO: complex arg: encryption_details cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `Indicates whether the external location is read-only.`) cmd.Flags().BoolVar(&createReq.SkipValidation, "skip-validation", createReq.SkipValidation, `Skips validation of the storage credential associated with the external location.`) @@ -322,8 +324,10 @@ func newUpdate() *cobra.Command { // TODO: short flags cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + cmd.Flags().StringVar(&updateReq.AccessPoint, "access-point", updateReq.AccessPoint, `The AWS access point to use when accesing s3 for this external location.`) cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&updateReq.CredentialName, "credential-name", updateReq.CredentialName, `Name of the storage credential used with this location.`) + // TODO: complex arg: encryption_details cmd.Flags().BoolVar(&updateReq.Force, "force", updateReq.Force, `Force update even if changing url invalidates dependent external tables or mounts.`) cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of the external location.`) 
cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The owner of the external location.`) diff --git a/cmd/workspace/providers/providers.go b/cmd/workspace/providers/providers.go index e893f3303..db2a98448 100755 --- a/cmd/workspace/providers/providers.go +++ b/cmd/workspace/providers/providers.go @@ -18,9 +18,11 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ - Use: "providers", - Short: `Databricks Providers REST API.`, - Long: `Databricks Providers REST API`, + Use: "providers", + Short: `A data provider is an object representing the organization in the real world who shares the data.`, + Long: `A data provider is an object representing the organization in the real world + who shares the data. A provider contains shares which further contain the + shared data.`, GroupID: "sharing", Annotations: map[string]string{ "package": "sharing", diff --git a/cmd/workspace/recipient-activation/recipient-activation.go b/cmd/workspace/recipient-activation/recipient-activation.go index fa0e6a83f..c73b4b4a9 100755 --- a/cmd/workspace/recipient-activation/recipient-activation.go +++ b/cmd/workspace/recipient-activation/recipient-activation.go @@ -15,9 +15,18 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ - Use: "recipient-activation", - Short: `Databricks Recipient Activation REST API.`, - Long: `Databricks Recipient Activation REST API`, + Use: "recipient-activation", + Short: `The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of TOKEN.`, + Long: `The Recipient Activation API is only applicable in the open sharing model + where the recipient object has the authentication type of TOKEN. The data + recipient follows the activation link shared by the data provider to download + the credential file that includes the access token. 
The recipient will then + use the credential file to establish a secure connection with the provider to + receive the shared data. + + Note that you can download the credential file only once. Recipients should + treat the downloaded credential as a secret and must not share it outside of + their organization.`, GroupID: "sharing", Annotations: map[string]string{ "package": "sharing", diff --git a/cmd/workspace/recipients/recipients.go b/cmd/workspace/recipients/recipients.go index c96e5fc59..ceed57848 100755 --- a/cmd/workspace/recipients/recipients.go +++ b/cmd/workspace/recipients/recipients.go @@ -18,9 +18,25 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ - Use: "recipients", - Short: `Databricks Recipients REST API.`, - Long: `Databricks Recipients REST API`, + Use: "recipients", + Short: `A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares.`, + Long: `A recipient is an object you create using :method:recipients/create to + represent an organization which you want to allow access shares. The way how + sharing works differs depending on whether or not your recipient has access to + a Databricks workspace that is enabled for Unity Catalog: + + - For recipients with access to a Databricks workspace that is enabled for + Unity Catalog, you can create a recipient object along with a unique sharing + identifier you get from the recipient. The sharing identifier is the key + identifier that enables the secure connection. This sharing mode is called + **Databricks-to-Databricks sharing**. + + - For recipients without access to a Databricks workspace that is enabled for + Unity Catalog, when you create a recipient object, Databricks generates an + activation link you can send to the recipient. 
The recipient follows the + activation link to download the credential file, and then uses the credential + file to establish a secure connection to receive the shared data. This sharing + mode is called **open sharing**.`, GroupID: "sharing", Annotations: map[string]string{ "package": "sharing", diff --git a/cmd/workspace/shares/shares.go b/cmd/workspace/shares/shares.go index 7643567a9..cf96b8b3a 100755 --- a/cmd/workspace/shares/shares.go +++ b/cmd/workspace/shares/shares.go @@ -16,9 +16,13 @@ var cmdOverrides []func(*cobra.Command) func New() *cobra.Command { cmd := &cobra.Command{ - Use: "shares", - Short: `Databricks Shares REST API.`, - Long: `Databricks Shares REST API`, + Use: "shares", + Short: `A share is a container instantiated with :method:shares/create.`, + Long: `A share is a container instantiated with :method:shares/create. Once created + you can iteratively register a collection of existing data assets defined + within the metastore using :method:shares/update. You can register data assets + under their original name, qualified by their original schema, or provide + alternate exposed names.`, GroupID: "sharing", Annotations: map[string]string{ "package": "sharing", diff --git a/go.mod b/go.mod index b839d11bf..8f4051e1e 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.18 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.13.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.14.1 // Apache 2.0 github.com/fatih/color v1.15.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.3.0 // BSD-3-Clause diff --git a/go.sum b/go.sum index 1071343e5..38a551088 100644 --- a/go.sum +++ b/go.sum @@ -34,8 +34,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go 
v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/databricks/databricks-sdk-go v0.13.0 h1:Npi4laUUmcOPDPdJf2ZMGFUtybpf4LK6n5NQY56Ya2Q= -github.com/databricks/databricks-sdk-go v0.13.0/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= +github.com/databricks/databricks-sdk-go v0.14.1 h1:s9x18c2i6XbJxem6zKdTrrwEUXQX/Nzn0iVM+qGlRus= +github.com/databricks/databricks-sdk-go v0.14.1/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= From fc8729d162cc14518fe16b49751ab85ee7848c99 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 1 Aug 2023 15:43:27 +0200 Subject: [PATCH 036/139] Only treat files with .tmpl extension as templates (#594) ## Changes In a world before this PR, all files would be treated as `go text templates`, making the content in these files quake in fear since they would be executed (as a template). This PR makes it so that only files with the `.tmpl` extension are understood to be templates. This is useful for avoiding ambiguity in cases like where a binary file could be interpreted as a go text template otherwise. In order to do so, we introduce the `copyFile` struct which does a copy of the source file from the template without loading it into memory. 
## Tests Unit tests --- libs/template/file.go | 101 ++++++++ libs/template/file_test.go | 111 +++++++++ libs/template/helpers_test.go | 6 +- libs/template/renderer.go | 101 ++++---- libs/template/renderer_test.go | 216 +++++++++--------- .../copy-file-walk/template/not-a-template | 1 + .../template/{my_email => my_email.tmpl} | 0 .../{not-a-script => not-a-script.tmpl} | 0 .../template/{script.sh => script.sh.tmpl} | 0 .../fail/template/{hello => hello.tmpl} | 0 .../template/{file3 => file3.tmpl} | 0 .../template/dir1/{file1 => file1.tmpl} | 0 .../template/{file2 => file2.tmpl} | 0 .../template/dir1/dir2/{file3 => file3.tmpl} | 0 .../template/dir1/{file2 => file2.tmpl} | 0 .../template/{file1 => file1.tmpl} | 0 .../skip/template/{file1 => file1.tmpl} | 0 .../skip/template/{file2 => file2.tmpl} | 0 .../walk/template/dir2/{file4 => file4.tmpl} | 0 19 files changed, 370 insertions(+), 166 deletions(-) create mode 100644 libs/template/file.go create mode 100644 libs/template/file_test.go create mode 100644 libs/template/testdata/copy-file-walk/template/not-a-template rename libs/template/testdata/email/template/{my_email => my_email.tmpl} (100%) rename libs/template/testdata/executable-bit-read/template/{not-a-script => not-a-script.tmpl} (100%) rename libs/template/testdata/executable-bit-read/template/{script.sh => script.sh.tmpl} (100%) rename libs/template/testdata/fail/template/{hello => hello.tmpl} (100%) rename libs/template/testdata/skip-all-files-in-cwd/template/{file3 => file3.tmpl} (100%) rename libs/template/testdata/skip-dir-eagerly/template/dir1/{file1 => file1.tmpl} (100%) rename libs/template/testdata/skip-dir-eagerly/template/{file2 => file2.tmpl} (100%) rename libs/template/testdata/skip-is-relative/template/dir1/dir2/{file3 => file3.tmpl} (100%) rename libs/template/testdata/skip-is-relative/template/dir1/{file2 => file2.tmpl} (100%) rename libs/template/testdata/skip-is-relative/template/{file1 => file1.tmpl} (100%) rename 
libs/template/testdata/skip/template/{file1 => file1.tmpl} (100%) rename libs/template/testdata/skip/template/{file2 => file2.tmpl} (100%) rename libs/template/testdata/walk/template/dir2/{file4 => file4.tmpl} (100%) diff --git a/libs/template/file.go b/libs/template/file.go new file mode 100644 index 000000000..aafb1acfa --- /dev/null +++ b/libs/template/file.go @@ -0,0 +1,101 @@ +package template + +import ( + "context" + "io" + "io/fs" + "os" + "path/filepath" + + "github.com/databricks/cli/libs/filer" +) + +// Interface representing a file to be materialized from a template into a project +// instance +type file interface { + // Destination path for file. This is where the file will be created when + // PersistToDisk is called. + DstPath() *destinationPath + + // Write file to disk at the destination path. + PersistToDisk() error +} + +type destinationPath struct { + // Root path for the project instance. This path uses the system's default + // file separator. For example /foo/bar on Unix and C:\foo\bar on windows + root string + + // Unix like file path relative to the "root" of the instantiated project. Is used to + // evaluate whether the file should be skipped by comparing it to a list of + // skip glob patterns. + relPath string +} + +// Absolute path of the file, in the os native format. For example /foo/bar on +// Unix and C:\foo\bar on windows +func (f *destinationPath) absPath() string { + return filepath.Join(f.root, filepath.FromSlash(f.relPath)) +} + +type copyFile struct { + ctx context.Context + + // Permissions bits for the destination file + perm fs.FileMode + + dstPath *destinationPath + + // Filer rooted at template root. Used to read srcPath. + srcFiler filer.Filer + + // Relative path from template root for file to be copied. 
+ srcPath string +} + +func (f *copyFile) DstPath() *destinationPath { + return f.dstPath +} + +func (f *copyFile) PersistToDisk() error { + path := f.DstPath().absPath() + err := os.MkdirAll(filepath.Dir(path), 0755) + if err != nil { + return err + } + srcFile, err := f.srcFiler.Read(f.ctx, f.srcPath) + if err != nil { + return err + } + defer srcFile.Close() + dstFile, err := os.OpenFile(path, os.O_CREATE|os.O_EXCL|os.O_WRONLY, f.perm) + if err != nil { + return err + } + defer dstFile.Close() + _, err = io.Copy(dstFile, srcFile) + return err +} + +type inMemoryFile struct { + dstPath *destinationPath + + content []byte + + // Permissions bits for the destination file + perm fs.FileMode +} + +func (f *inMemoryFile) DstPath() *destinationPath { + return f.dstPath +} + +func (f *inMemoryFile) PersistToDisk() error { + path := f.DstPath().absPath() + + err := os.MkdirAll(filepath.Dir(path), 0755) + if err != nil { + return err + } + return os.WriteFile(path, f.content, f.perm) +} diff --git a/libs/template/file_test.go b/libs/template/file_test.go new file mode 100644 index 000000000..85938895e --- /dev/null +++ b/libs/template/file_test.go @@ -0,0 +1,111 @@ +package template + +import ( + "context" + "io/fs" + "os" + "path/filepath" + "runtime" + "testing" + + "github.com/databricks/cli/libs/filer" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testInMemoryFile(t *testing.T, perm fs.FileMode) { + tmpDir := t.TempDir() + + f := &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, + perm: perm, + content: []byte("123"), + } + err := f.PersistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), perm) +} + +func testCopyFile(t *testing.T, perm fs.FileMode) { + tmpDir := t.TempDir() + + templateFiler, err := filer.NewLocalClient(tmpDir) + require.NoError(t, err) + err = 
os.WriteFile(filepath.Join(tmpDir, "source"), []byte("qwerty"), perm) + require.NoError(t, err) + + f := &copyFile{ + ctx: context.Background(), + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, + perm: perm, + srcPath: "source", + srcFiler: templateFiler, + } + err = f.PersistToDisk() + assert.NoError(t, err) + + assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "qwerty") + assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), perm) +} + +func TestTemplateFileDestinationPath(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + f := &destinationPath{ + root: `a/b/c`, + relPath: "d/e", + } + assert.Equal(t, `a/b/c/d/e`, f.absPath()) +} + +func TestTemplateFileDestinationPathForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + f := &destinationPath{ + root: `c:\a\b\c`, + relPath: "d/e", + } + assert.Equal(t, `c:\a\b\c\d\e`, f.absPath()) +} + +func TestTemplateInMemoryFilePersistToDisk(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + testInMemoryFile(t, 0755) +} + +func TestTemplateInMemoryFilePersistToDiskForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + // we have separate tests for windows because of differences in valid + // fs.FileMode values we can use for different operating systems. + testInMemoryFile(t, 0666) +} + +func TestTemplateCopyFilePersistToDisk(t *testing.T) { + if runtime.GOOS == "windows" { + t.SkipNow() + } + testCopyFile(t, 0644) +} + +func TestTemplateCopyFilePersistToDiskForWindows(t *testing.T) { + if runtime.GOOS != "windows" { + t.SkipNow() + } + // we have separate tests for windows because of differences in valid + // fs.FileMode values we can use for different operating systems. 
+ testCopyFile(t, 0666) +} diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index e904edecc..169e06f35 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -20,7 +20,7 @@ func TestTemplatePrintStringWithoutProcessing(t *testing.T) { assert.NoError(t, err) assert.Len(t, r.files, 1) - cleanContent := strings.Trim(string(r.files[0].content), "\n\r") + cleanContent := strings.Trim(string(r.files[0].(*inMemoryFile).content), "\n\r") assert.Equal(t, `{{ fail "abc" }}`, cleanContent) } @@ -35,7 +35,7 @@ func TestTemplateRegexpCompileFunction(t *testing.T) { assert.NoError(t, err) assert.Len(t, r.files, 1) - content := string(r.files[0].content) + content := string(r.files[0].(*inMemoryFile).content) assert.Contains(t, content, "0:food") assert.Contains(t, content, "1:fool") } @@ -52,5 +52,5 @@ func TestTemplateUrlFunction(t *testing.T) { assert.NoError(t, err) assert.Len(t, r.files, 1) - assert.Equal(t, "https://www.databricks.com", string(r.files[0].content)) + assert.Equal(t, "https://www.databricks.com", string(r.files[0].(*inMemoryFile).content)) } diff --git a/libs/template/renderer.go b/libs/template/renderer.go index 8502a9288..c7e79841c 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -5,7 +5,6 @@ import ( "errors" "fmt" "io" - "io/fs" "os" "path" "path/filepath" @@ -18,32 +17,7 @@ import ( "golang.org/x/exp/slices" ) -type inMemoryFile struct { - // Root path for the project instance. This path uses the system's default - // file separator. For example /foo/bar on Unix and C:\foo\bar on windows - root string - - // Unix like relPath for the file (using '/' as the separator). This path - // is relative to the root. Using unix like relative paths enables skip patterns - // to work across both windows and unix based operating systems. 
- relPath string - content []byte - perm fs.FileMode -} - -func (f *inMemoryFile) fullPath() string { - return filepath.Join(f.root, filepath.FromSlash(f.relPath)) -} - -func (f *inMemoryFile) persistToDisk() error { - path := f.fullPath() - - err := os.MkdirAll(filepath.Dir(path), 0755) - if err != nil { - return err - } - return os.WriteFile(path, f.content, f.perm) -} +const templateExtension = ".tmpl" // Renders a databricks template as a project type renderer struct { @@ -60,7 +34,7 @@ type renderer struct { baseTemplate *template.Template // List of in memory files generated from template - files []*inMemoryFile + files []file // Glob patterns for files and directories to skip. There are three possible // outcomes for skip: @@ -111,7 +85,7 @@ func newRenderer(ctx context.Context, config map[string]any, templateRoot, libra ctx: ctx, config: config, baseTemplate: tmpl, - files: make([]*inMemoryFile, 0), + files: make([]file, 0), skipPatterns: make([]string, 0), templateFiler: templateFiler, instanceRoot: instanceRoot, @@ -142,17 +116,7 @@ func (r *renderer) executeTemplate(templateDefinition string) (string, error) { return result.String(), nil } -func (r *renderer) computeFile(relPathTemplate string) (*inMemoryFile, error) { - // read template file contents - templateReader, err := r.templateFiler.Read(r.ctx, relPathTemplate) - if err != nil { - return nil, err - } - contentTemplate, err := io.ReadAll(templateReader) - if err != nil { - return nil, err - } - +func (r *renderer) computeFile(relPathTemplate string) (file, error) { // read file permissions info, err := r.templateFiler.Stat(r.ctx, relPathTemplate) if err != nil { @@ -160,7 +124,33 @@ func (r *renderer) computeFile(relPathTemplate string) (*inMemoryFile, error) { } perm := info.Mode().Perm() + // If file name does not specify the `.tmpl` extension, then it is copied + // over as is, without treating it as a template + if !strings.HasSuffix(relPathTemplate, templateExtension) { + return &copyFile{ + 
dstPath: &destinationPath{ + root: r.instanceRoot, + relPath: relPathTemplate, + }, + perm: perm, + ctx: r.ctx, + srcPath: relPathTemplate, + srcFiler: r.templateFiler, + }, nil + } + + // read template file's content + templateReader, err := r.templateFiler.Read(r.ctx, relPathTemplate) + if err != nil { + return nil, err + } + defer templateReader.Close() + // execute the contents of the file as a template + contentTemplate, err := io.ReadAll(templateReader) + if err != nil { + return nil, err + } content, err := r.executeTemplate(string(contentTemplate)) // Capture errors caused by the "fail" helper function if target := (&ErrFail{}); errors.As(err, target) { @@ -171,16 +161,19 @@ func (r *renderer) computeFile(relPathTemplate string) (*inMemoryFile, error) { } // Execute relative path template to get materialized path for the file + relPathTemplate = strings.TrimSuffix(relPathTemplate, templateExtension) relPath, err := r.executeTemplate(relPathTemplate) if err != nil { return nil, err } return &inMemoryFile{ - root: r.instanceRoot, - relPath: relPath, - content: []byte(content), + dstPath: &destinationPath{ + root: r.instanceRoot, + relPath: relPath, + }, perm: perm, + content: []byte(content), }, nil } @@ -206,11 +199,11 @@ func (r *renderer) walk() error { if err != nil { return err } - isSkipped, err := r.isSkipped(instanceDirectory) + match, err := isSkipped(instanceDirectory, r.skipPatterns) if err != nil { return err } - if isSkipped { + if match { logger.Infof(r.ctx, "skipping directory: %s", instanceDirectory) continue } @@ -255,7 +248,7 @@ func (r *renderer) walk() error { if err != nil { return err } - logger.Infof(r.ctx, "added file to list of in memory files: %s", f.relPath) + logger.Infof(r.ctx, "added file to list of possible project files: %s", f.DstPath().relPath) r.files = append(r.files, f) } @@ -266,14 +259,14 @@ func (r *renderer) walk() error { func (r *renderer) persistToDisk() error { // Accumulate files which we will persist, skipping 
files whose path matches // any of the skip patterns - filesToPersist := make([]*inMemoryFile, 0) + filesToPersist := make([]file, 0) for _, file := range r.files { - isSkipped, err := r.isSkipped(file.relPath) + match, err := isSkipped(file.DstPath().relPath, r.skipPatterns) if err != nil { return err } - if isSkipped { - log.Infof(r.ctx, "skipping file: %s", file.relPath) + if match { + log.Infof(r.ctx, "skipping file: %s", file.DstPath()) continue } filesToPersist = append(filesToPersist, file) @@ -281,7 +274,7 @@ func (r *renderer) persistToDisk() error { // Assert no conflicting files exist for _, file := range filesToPersist { - path := file.fullPath() + path := file.DstPath().absPath() _, err := os.Stat(path) if err == nil { return fmt.Errorf("failed to persist to disk, conflict with existing file: %s", path) @@ -293,7 +286,7 @@ func (r *renderer) persistToDisk() error { // Persist files to disk for _, file := range filesToPersist { - err := file.persistToDisk() + err := file.PersistToDisk() if err != nil { return err } @@ -301,8 +294,8 @@ func (r *renderer) persistToDisk() error { return nil } -func (r *renderer) isSkipped(filePath string) (bool, error) { - for _, pattern := range r.skipPatterns { +func isSkipped(filePath string, patterns []string) (bool, error) { + for _, pattern := range patterns { isMatch, err := path.Match(pattern, filePath) if err != nil { return false, err diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index 468c607f4..8cd89ae99 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -3,6 +3,7 @@ package template import ( "context" "fmt" + "io" "io/fs" "os" "path/filepath" @@ -89,59 +90,58 @@ My email is {{template "email"}} } func TestRendererIsSkipped(t *testing.T) { - r := renderer{ - skipPatterns: []string{"a*", "*yz", "def", "a/b/*"}, - } + + skipPatterns := []string{"a*", "*yz", "def", "a/b/*"} // skipped paths - isSkipped, err := r.isSkipped("abc") + match, err := 
isSkipped("abc", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) - isSkipped, err = r.isSkipped("abcd") + match, err = isSkipped("abcd", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) - isSkipped, err = r.isSkipped("a") + match, err = isSkipped("a", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) - isSkipped, err = r.isSkipped("xxyz") + match, err = isSkipped("xxyz", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) - isSkipped, err = r.isSkipped("yz") + match, err = isSkipped("yz", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) - isSkipped, err = r.isSkipped("a/b/c") + match, err = isSkipped("a/b/c", skipPatterns) require.NoError(t, err) - assert.True(t, isSkipped) + assert.True(t, match) // NOT skipped paths - isSkipped, err = r.isSkipped(".") + match, err = isSkipped(".", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) - isSkipped, err = r.isSkipped("y") + match, err = isSkipped("y", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) - isSkipped, err = r.isSkipped("z") + match, err = isSkipped("z", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) - isSkipped, err = r.isSkipped("defg") + match, err = isSkipped("defg", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) - isSkipped, err = r.isSkipped("cat") + match, err = isSkipped("cat", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) - isSkipped, err = r.isSkipped("a/b/c/d") + match, err = isSkipped("a/b/c/d", skipPatterns) require.NoError(t, err) - assert.False(t, isSkipped) + assert.False(t, match) } func TestRendererPersistToDisk(t *testing.T) { @@ -152,30 +152,38 @@ func 
TestRendererPersistToDisk(t *testing.T) { ctx: ctx, instanceRoot: tmpDir, skipPatterns: []string{"a/b/c", "mn*"}, - files: []*inMemoryFile{ - { - root: tmpDir, - relPath: "a/b/c", - content: nil, + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/c", + }, perm: 0444, - }, - { - root: tmpDir, - relPath: "mno", content: nil, - perm: 0444, }, - { - root: tmpDir, - relPath: "a/b/d", + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "mno", + }, + perm: 0444, + content: nil, + }, + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a/b/d", + }, + perm: 0444, content: []byte("123"), - perm: 0444, }, - { - root: tmpDir, - relPath: "mmnn", - content: []byte("456"), + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "mmnn", + }, perm: 0444, + content: []byte("456"), }, }, } @@ -204,8 +212,20 @@ func TestRendererWalk(t *testing.T) { getContent := func(r *renderer, path string) string { for _, f := range r.files { - if f.relPath == path { - return strings.Trim(string(f.content), "\r\n") + if f.DstPath().relPath != path { + continue + } + switch v := f.(type) { + case *inMemoryFile: + return strings.Trim(string(v.content), "\r\n") + case *copyFile: + r, err := r.templateFiler.Read(context.Background(), v.srcPath) + require.NoError(t, err) + b, err := io.ReadAll(r) + require.NoError(t, err) + return strings.Trim(string(b), "\r\n") + default: + require.FailNow(t, "execution should not reach here") } } require.FailNow(t, "file is absent: "+path) @@ -241,7 +261,7 @@ func TestRendererSkipsDirsEagerly(t *testing.T) { assert.NoError(t, err) assert.Len(t, r.files, 1) - content := string(r.files[0].content) + content := string(r.files[0].(*inMemoryFile).content) assert.Equal(t, "I should be the only file created", strings.Trim(content, "\r\n")) } @@ -309,55 +329,6 @@ func TestRendererSkip(t *testing.T) { assert.NoFileExists(t, filepath.Join(tmpDir, "dir2/file6")) } -func 
TestRendererInMemoryFileFullPathForWindows(t *testing.T) { - if runtime.GOOS != "windows" { - t.SkipNow() - } - f := &inMemoryFile{ - root: `c:\a\b\c`, - relPath: "d/e", - } - assert.Equal(t, `c:\a\b\c\d\e`, f.fullPath()) -} - -func TestRendererInMemoryFilePersistToDiskSetsExecutableBit(t *testing.T) { - if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { - t.SkipNow() - } - tmpDir := t.TempDir() - - f := &inMemoryFile{ - root: tmpDir, - relPath: "a/b/c", - content: []byte("123"), - perm: 0755, - } - err := f.persistToDisk() - assert.NoError(t, err) - - assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") - assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), 0755) -} - -func TestRendererInMemoryFilePersistToDiskForWindows(t *testing.T) { - if runtime.GOOS != "windows" { - t.SkipNow() - } - tmpDir := t.TempDir() - - f := &inMemoryFile{ - root: tmpDir, - relPath: "a/b/c", - content: []byte("123"), - perm: 0666, - } - err := f.persistToDisk() - assert.NoError(t, err) - - assertFileContent(t, filepath.Join(tmpDir, "a/b/c"), "123") - assertFilePermissions(t, filepath.Join(tmpDir, "a/b/c"), 0666) -} - func TestRendererReadsPermissionsBits(t *testing.T) { if runtime.GOOS != "linux" && runtime.GOOS != "darwin" { t.SkipNow() @@ -373,8 +344,16 @@ func TestRendererReadsPermissionsBits(t *testing.T) { getPermissions := func(r *renderer, path string) fs.FileMode { for _, f := range r.files { - if f.relPath == path { - return f.perm + if f.DstPath().relPath != path { + continue + } + switch v := f.(type) { + case *inMemoryFile: + return v.perm + case *copyFile: + return v.perm + default: + require.FailNow(t, "execution should not reach here") } } require.FailNow(t, "file is absent: "+path) @@ -396,12 +375,14 @@ func TestRendererErrorOnConflictingFile(t *testing.T) { r := renderer{ skipPatterns: []string{}, - files: []*inMemoryFile{ - { - root: tmpDir, - relPath: "a", - content: []byte("123"), + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: 
tmpDir, + relPath: "a", + }, perm: 0444, + content: []byte("123"), }, }, } @@ -421,12 +402,14 @@ func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { r := renderer{ ctx: ctx, skipPatterns: []string{"a"}, - files: []*inMemoryFile{ - { - root: tmpDir, - relPath: "a", - content: []byte("123"), + files: []file{ + &inMemoryFile{ + dstPath: &destinationPath{ + root: tmpDir, + relPath: "a", + }, perm: 0444, + content: []byte("123"), }, }, } @@ -436,3 +419,18 @@ func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { assert.NoError(t, err) assert.Len(t, r.files, 1) } + +func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/copy-file-walk/template", "./testdata/copy-file-walk/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, r.files[0].(*copyFile).srcPath, "not-a-template") + assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "not-a-template")) +} diff --git a/libs/template/testdata/copy-file-walk/template/not-a-template b/libs/template/testdata/copy-file-walk/template/not-a-template new file mode 100644 index 000000000..8baef1b4a --- /dev/null +++ b/libs/template/testdata/copy-file-walk/template/not-a-template @@ -0,0 +1 @@ +abc diff --git a/libs/template/testdata/email/template/my_email b/libs/template/testdata/email/template/my_email.tmpl similarity index 100% rename from libs/template/testdata/email/template/my_email rename to libs/template/testdata/email/template/my_email.tmpl diff --git a/libs/template/testdata/executable-bit-read/template/not-a-script b/libs/template/testdata/executable-bit-read/template/not-a-script.tmpl similarity index 100% rename from libs/template/testdata/executable-bit-read/template/not-a-script rename to libs/template/testdata/executable-bit-read/template/not-a-script.tmpl diff --git 
a/libs/template/testdata/executable-bit-read/template/script.sh b/libs/template/testdata/executable-bit-read/template/script.sh.tmpl similarity index 100% rename from libs/template/testdata/executable-bit-read/template/script.sh rename to libs/template/testdata/executable-bit-read/template/script.sh.tmpl diff --git a/libs/template/testdata/fail/template/hello b/libs/template/testdata/fail/template/hello.tmpl similarity index 100% rename from libs/template/testdata/fail/template/hello rename to libs/template/testdata/fail/template/hello.tmpl diff --git a/libs/template/testdata/skip-all-files-in-cwd/template/file3 b/libs/template/testdata/skip-all-files-in-cwd/template/file3.tmpl similarity index 100% rename from libs/template/testdata/skip-all-files-in-cwd/template/file3 rename to libs/template/testdata/skip-all-files-in-cwd/template/file3.tmpl diff --git a/libs/template/testdata/skip-dir-eagerly/template/dir1/file1 b/libs/template/testdata/skip-dir-eagerly/template/dir1/file1.tmpl similarity index 100% rename from libs/template/testdata/skip-dir-eagerly/template/dir1/file1 rename to libs/template/testdata/skip-dir-eagerly/template/dir1/file1.tmpl diff --git a/libs/template/testdata/skip-dir-eagerly/template/file2 b/libs/template/testdata/skip-dir-eagerly/template/file2.tmpl similarity index 100% rename from libs/template/testdata/skip-dir-eagerly/template/file2 rename to libs/template/testdata/skip-dir-eagerly/template/file2.tmpl diff --git a/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 b/libs/template/testdata/skip-is-relative/template/dir1/dir2/file3.tmpl similarity index 100% rename from libs/template/testdata/skip-is-relative/template/dir1/dir2/file3 rename to libs/template/testdata/skip-is-relative/template/dir1/dir2/file3.tmpl diff --git a/libs/template/testdata/skip-is-relative/template/dir1/file2 b/libs/template/testdata/skip-is-relative/template/dir1/file2.tmpl similarity index 100% rename from 
libs/template/testdata/skip-is-relative/template/dir1/file2 rename to libs/template/testdata/skip-is-relative/template/dir1/file2.tmpl diff --git a/libs/template/testdata/skip-is-relative/template/file1 b/libs/template/testdata/skip-is-relative/template/file1.tmpl similarity index 100% rename from libs/template/testdata/skip-is-relative/template/file1 rename to libs/template/testdata/skip-is-relative/template/file1.tmpl diff --git a/libs/template/testdata/skip/template/file1 b/libs/template/testdata/skip/template/file1.tmpl similarity index 100% rename from libs/template/testdata/skip/template/file1 rename to libs/template/testdata/skip/template/file1.tmpl diff --git a/libs/template/testdata/skip/template/file2 b/libs/template/testdata/skip/template/file2.tmpl similarity index 100% rename from libs/template/testdata/skip/template/file2 rename to libs/template/testdata/skip/template/file2.tmpl diff --git a/libs/template/testdata/walk/template/dir2/file4 b/libs/template/testdata/walk/template/dir2/file4.tmpl similarity index 100% rename from libs/template/testdata/walk/template/dir2/file4 rename to libs/template/testdata/walk/template/dir2/file4.tmpl From 5df8935de4038ae75347a8b1e1d304c29c215ed1 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 1 Aug 2023 16:09:27 +0200 Subject: [PATCH 037/139] Add JSON schema validation for input template parameters (#598) ## Changes This PR: 1. Adds code for reading template configs and validating them against a JSON schema. 2. Moves the json schema struct in `bundle/schema` to a separate library package. This struct is now reused for validating template configs. 
## Tests Unit tests --- bundle/schema/docs.go | 17 +- bundle/schema/docs_test.go | 11 +- bundle/schema/openapi.go | 23 +-- bundle/schema/openapi_test.go | 17 +- bundle/schema/schema.go | 94 +++-------- libs/jsonschema/schema.go | 49 ++++++ libs/template/schema.go | 121 ++++++++++++++ libs/template/schema_test.go | 274 +++++++++++++++++++++++++++++++ libs/template/validators.go | 60 +++++++ libs/template/validators_test.go | 76 +++++++++ 10 files changed, 642 insertions(+), 100 deletions(-) create mode 100644 libs/jsonschema/schema.go create mode 100644 libs/template/schema.go create mode 100644 libs/template/schema_test.go create mode 100644 libs/template/validators.go create mode 100644 libs/template/validators_test.go diff --git a/bundle/schema/docs.go b/bundle/schema/docs.go index 13a4549d0..5fcef4edd 100644 --- a/bundle/schema/docs.go +++ b/bundle/schema/docs.go @@ -8,6 +8,7 @@ import ( "reflect" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" ) @@ -39,7 +40,7 @@ func BundleDocs(openapiSpecPath string) (*Docs, error) { } openapiReader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } resourcesDocs, err := openapiReader.ResourcesDocs() if err != nil { @@ -88,22 +89,22 @@ func initializeBundleDocs() (*Docs, error) { } // *Docs are a subset of *Schema, this function selects that subset -func schemaToDocs(schema *Schema) *Docs { +func schemaToDocs(jsonSchema *jsonschema.Schema) *Docs { // terminate recursion if schema is nil - if schema == nil { + if jsonSchema == nil { return nil } docs := &Docs{ - Description: schema.Description, + Description: jsonSchema.Description, } - if len(schema.Properties) > 0 { + if len(jsonSchema.Properties) > 0 { docs.Properties = make(map[string]*Docs) } - for k, v := range schema.Properties { + for k, v := range jsonSchema.Properties { docs.Properties[k] = schemaToDocs(v) } - 
docs.Items = schemaToDocs(schema.Items) - if additionalProperties, ok := schema.AdditionalProperties.(*Schema); ok { + docs.Items = schemaToDocs(jsonSchema.Items) + if additionalProperties, ok := jsonSchema.AdditionalProperties.(*jsonschema.Schema); ok { docs.AdditionalProperties = schemaToDocs(additionalProperties) } return docs diff --git a/bundle/schema/docs_test.go b/bundle/schema/docs_test.go index 84d804b07..83ee681b0 100644 --- a/bundle/schema/docs_test.go +++ b/bundle/schema/docs_test.go @@ -4,30 +4,31 @@ import ( "encoding/json" "testing" + "github.com/databricks/cli/libs/jsonschema" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestSchemaToDocs(t *testing.T) { - schema := &Schema{ + jsonSchema := &jsonschema.Schema{ Type: "object", Description: "root doc", - Properties: map[string]*Schema{ + Properties: map[string]*jsonschema.Schema{ "foo": {Type: "number", Description: "foo doc"}, "bar": {Type: "string"}, "octave": { Type: "object", - AdditionalProperties: &Schema{Type: "number"}, + AdditionalProperties: &jsonschema.Schema{Type: "number"}, Description: "octave docs", }, "scales": { Type: "object", Description: "scale docs", - Items: &Schema{Type: "string"}, + Items: &jsonschema.Schema{Type: "string"}, }, }, } - docs := schemaToDocs(schema) + docs := schemaToDocs(jsonSchema) docsJson, err := json.MarshalIndent(docs, " ", " ") require.NoError(t, err) diff --git a/bundle/schema/openapi.go b/bundle/schema/openapi.go index 9b4b27dd9..b0d676576 100644 --- a/bundle/schema/openapi.go +++ b/bundle/schema/openapi.go @@ -5,17 +5,18 @@ import ( "fmt" "strings" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" ) type OpenapiReader struct { OpenapiSpec *openapi.Specification - Memo map[string]*Schema + Memo map[string]*jsonschema.Schema } const SchemaPathPrefix = "#/components/schemas/" -func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { +func (reader 
*OpenapiReader) readOpenapiSchema(path string) (*jsonschema.Schema, error) { schemaKey := strings.TrimPrefix(path, SchemaPathPrefix) // return early if we already have a computed schema @@ -35,7 +36,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { if err != nil { return nil, err } - jsonSchema := &Schema{} + jsonSchema := &jsonschema.Schema{} err = json.Unmarshal(bytes, jsonSchema) if err != nil { return nil, err @@ -50,7 +51,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { if err != nil { return nil, err } - additionalProperties := &Schema{} + additionalProperties := &jsonschema.Schema{} err = json.Unmarshal(b, additionalProperties) if err != nil { return nil, err @@ -65,7 +66,7 @@ func (reader *OpenapiReader) readOpenapiSchema(path string) (*Schema, error) { } // safe againt loops in refs -func (reader *OpenapiReader) safeResolveRefs(root *Schema, tracker *tracker) (*Schema, error) { +func (reader *OpenapiReader) safeResolveRefs(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) { if root.Reference == nil { return reader.traverseSchema(root, tracker) } @@ -100,9 +101,9 @@ func (reader *OpenapiReader) safeResolveRefs(root *Schema, tracker *tracker) (*S return root, err } -func (reader *OpenapiReader) traverseSchema(root *Schema, tracker *tracker) (*Schema, error) { +func (reader *OpenapiReader) traverseSchema(root *jsonschema.Schema, tracker *tracker) (*jsonschema.Schema, error) { // case primitive (or invalid) - if root.Type != Object && root.Type != Array { + if root.Type != jsonschema.ObjectType && root.Type != jsonschema.ArrayType { return root, nil } // only root references are resolved @@ -128,9 +129,9 @@ func (reader *OpenapiReader) traverseSchema(root *Schema, tracker *tracker) (*Sc root.Items = itemsSchema } // case map - additionionalProperties, ok := root.AdditionalProperties.(*Schema) - if ok && additionionalProperties != nil { - valueSchema, err := 
reader.safeResolveRefs(additionionalProperties, tracker) + additionalProperties, ok := root.AdditionalProperties.(*jsonschema.Schema) + if ok && additionalProperties != nil { + valueSchema, err := reader.safeResolveRefs(additionalProperties, tracker) if err != nil { return nil, err } @@ -139,7 +140,7 @@ func (reader *OpenapiReader) traverseSchema(root *Schema, tracker *tracker) (*Sc return root, nil } -func (reader *OpenapiReader) readResolvedSchema(path string) (*Schema, error) { +func (reader *OpenapiReader) readResolvedSchema(path string) (*jsonschema.Schema, error) { root, err := reader.readOpenapiSchema(path) if err != nil { return nil, err diff --git a/bundle/schema/openapi_test.go b/bundle/schema/openapi_test.go index 282fac8df..0d71fa440 100644 --- a/bundle/schema/openapi_test.go +++ b/bundle/schema/openapi_test.go @@ -4,6 +4,7 @@ import ( "encoding/json" "testing" + "github.com/databricks/cli/libs/jsonschema" "github.com/databricks/databricks-sdk-go/openapi" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -47,7 +48,7 @@ func TestReadSchemaForObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -105,7 +106,7 @@ func TestReadSchemaForArray(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -151,7 +152,7 @@ func TestReadSchemaForMap(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -200,7 +201,7 @@ func TestRootReferenceIsResolved(t 
*testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -250,7 +251,7 @@ func TestSelfReferenceLoopErrors(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -284,7 +285,7 @@ func TestCrossReferenceLoopErrors(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -329,7 +330,7 @@ func TestReferenceResolutionForMapInObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) @@ -399,7 +400,7 @@ func TestReferenceResolutionForArrayInObject(t *testing.T) { spec := &openapi.Specification{} reader := &OpenapiReader{ OpenapiSpec: spec, - Memo: make(map[string]*Schema), + Memo: make(map[string]*jsonschema.Schema), } err := json.Unmarshal([]byte(specString), spec) require.NoError(t, err) diff --git a/bundle/schema/schema.go b/bundle/schema/schema.go index 7a55cbd2b..fee9b676a 100644 --- a/bundle/schema/schema.go +++ b/bundle/schema/schema.go @@ -5,41 +5,10 @@ import ( "fmt" "reflect" "strings" + + "github.com/databricks/cli/libs/jsonschema" ) -// defines schema for a json object -type Schema struct { - // Type of the object - Type JavascriptType `json:"type,omitempty"` - - // Description of the object. This is rendered as inline documentation in the - // IDE. 
This is manually injected here using schema.Docs - Description string `json:"description,omitempty"` - - // Schemas for the fields of an struct. The keys are the first json tag. - // The values are the schema for the type of the field - Properties map[string]*Schema `json:"properties,omitempty"` - - // The schema for all values of an array - Items *Schema `json:"items,omitempty"` - - // The schema for any properties not mentioned in the Schema.Properties field. - // this validates maps[string]any in bundle configuration - // OR - // A boolean type with value false. Setting false here validates that all - // properties in the config have been defined in the json schema as properties - // - // Its type during runtime will either be *Schema or bool - AdditionalProperties any `json:"additionalProperties,omitempty"` - - // Required properties for the object. Any fields missing the "omitempty" - // json tag will be included - Required []string `json:"required,omitempty"` - - // URI to a json schema - Reference *string `json:"$ref,omitempty"` -} - // This function translates golang types into json schema. 
Here is the mapping // between json schema types and golang types // @@ -61,7 +30,7 @@ type Schema struct { // // - []MyStruct -> {type: object, properties: {}, additionalProperties: false} // for details visit: https://json-schema.org/understanding-json-schema/reference/object.html#properties -func New(golangType reflect.Type, docs *Docs) (*Schema, error) { +func New(golangType reflect.Type, docs *Docs) (*jsonschema.Schema, error) { tracker := newTracker() schema, err := safeToSchema(golangType, docs, "", tracker) if err != nil { @@ -70,39 +39,28 @@ func New(golangType reflect.Type, docs *Docs) (*Schema, error) { return schema, nil } -type JavascriptType string - -const ( - Invalid JavascriptType = "invalid" - Boolean JavascriptType = "boolean" - String JavascriptType = "string" - Number JavascriptType = "number" - Object JavascriptType = "object" - Array JavascriptType = "array" -) - -func javascriptType(golangType reflect.Type) (JavascriptType, error) { +func jsonSchemaType(golangType reflect.Type) (jsonschema.Type, error) { switch golangType.Kind() { case reflect.Bool: - return Boolean, nil + return jsonschema.BooleanType, nil case reflect.String: - return String, nil + return jsonschema.StringType, nil case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Float32, reflect.Float64: - return Number, nil + return jsonschema.NumberType, nil case reflect.Struct: - return Object, nil + return jsonschema.ObjectType, nil case reflect.Map: if golangType.Key().Kind() != reflect.String { - return Invalid, fmt.Errorf("only strings map keys are valid. key type: %v", golangType.Key().Kind()) + return jsonschema.InvalidType, fmt.Errorf("only strings map keys are valid. 
key type: %v", golangType.Key().Kind()) } - return Object, nil + return jsonschema.ObjectType, nil case reflect.Array, reflect.Slice: - return Array, nil + return jsonschema.ArrayType, nil default: - return Invalid, fmt.Errorf("unhandled golang type: %s", golangType) + return jsonschema.InvalidType, fmt.Errorf("unhandled golang type: %s", golangType) } } @@ -121,7 +79,7 @@ func javascriptType(golangType reflect.Type) (JavascriptType, error) { // like array, map or no json tags // // - tracker: Keeps track of types / traceIds seen during recursive traversal -func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*Schema, error) { +func safeToSchema(golangType reflect.Type, docs *Docs, traceId string, tracker *tracker) (*jsonschema.Schema, error) { // WE ERROR OUT IF THERE ARE CYCLES IN THE JSON SCHEMA // There are mechanisms to deal with cycles though recursive identifiers in json // schema. However if we use them, we would need to make sure we are able to detect @@ -174,29 +132,29 @@ func getStructFields(golangType reflect.Type) []reflect.StructField { return fields } -func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, error) { +func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschema.Schema, error) { // *Struct and Struct generate identical json schemas if golangType.Kind() == reflect.Pointer { return safeToSchema(golangType.Elem(), docs, "", tracker) } if golangType.Kind() == reflect.Interface { - return &Schema{}, nil + return &jsonschema.Schema{}, nil } - rootJavascriptType, err := javascriptType(golangType) + rootJavascriptType, err := jsonSchemaType(golangType) if err != nil { return nil, err } - schema := &Schema{Type: rootJavascriptType} + jsonSchema := &jsonschema.Schema{Type: rootJavascriptType} if docs != nil { - schema.Description = docs.Description + jsonSchema.Description = docs.Description } // case array/slice if golangType.Kind() == reflect.Array || 
golangType.Kind() == reflect.Slice { elemGolangType := golangType.Elem() - elemJavascriptType, err := javascriptType(elemGolangType) + elemJavascriptType, err := jsonSchemaType(elemGolangType) if err != nil { return nil, err } @@ -208,7 +166,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e if err != nil { return nil, err } - schema.Items = &Schema{ + jsonSchema.Items = &jsonschema.Schema{ Type: elemJavascriptType, Properties: elemProps.Properties, AdditionalProperties: elemProps.AdditionalProperties, @@ -226,7 +184,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e if docs != nil { childDocs = docs.AdditionalProperties } - schema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker) + jsonSchema.AdditionalProperties, err = safeToSchema(golangType.Elem(), childDocs, "", tracker) if err != nil { return nil, err } @@ -235,7 +193,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e // case struct if golangType.Kind() == reflect.Struct { children := getStructFields(golangType) - properties := map[string]*Schema{} + properties := map[string]*jsonschema.Schema{} required := []string{} for _, child := range children { bundleTag := child.Tag.Get("bundle") @@ -281,10 +239,10 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*Schema, e properties[childName] = fieldProps } - schema.AdditionalProperties = false - schema.Properties = properties - schema.Required = required + jsonSchema.AdditionalProperties = false + jsonSchema.Properties = properties + jsonSchema.Required = required } - return schema, nil + return jsonSchema, nil } diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go new file mode 100644 index 000000000..49e31bb74 --- /dev/null +++ b/libs/jsonschema/schema.go @@ -0,0 +1,49 @@ +package jsonschema + +// defines schema for a json object +type Schema struct { + // Type of the object + Type Type 
`json:"type,omitempty"` + + // Description of the object. This is rendered as inline documentation in the + // IDE. This is manually injected here using schema.Docs + Description string `json:"description,omitempty"` + + // Schemas for the fields of an struct. The keys are the first json tag. + // The values are the schema for the type of the field + Properties map[string]*Schema `json:"properties,omitempty"` + + // The schema for all values of an array + Items *Schema `json:"items,omitempty"` + + // The schema for any properties not mentioned in the Schema.Properties field. + // this validates maps[string]any in bundle configuration + // OR + // A boolean type with value false. Setting false here validates that all + // properties in the config have been defined in the json schema as properties + // + // Its type during runtime will either be *Schema or bool + AdditionalProperties any `json:"additionalProperties,omitempty"` + + // Required properties for the object. Any fields missing the "omitempty" + // json tag will be included + Required []string `json:"required,omitempty"` + + // URI to a json schema + Reference *string `json:"$ref,omitempty"` + + // Default value for the property / object + Default any `json:"default,omitempty"` +} + +type Type string + +const ( + InvalidType Type = "invalid" + BooleanType Type = "boolean" + StringType Type = "string" + NumberType Type = "number" + ObjectType Type = "object" + ArrayType Type = "array" + IntegerType Type = "integer" +) diff --git a/libs/template/schema.go b/libs/template/schema.go new file mode 100644 index 000000000..957cd66c7 --- /dev/null +++ b/libs/template/schema.go @@ -0,0 +1,121 @@ +package template + +import ( + "encoding/json" + "fmt" + "os" + + "github.com/databricks/cli/libs/jsonschema" +) + +// function to check whether a float value represents an integer +func isIntegerValue(v float64) bool { + return v == float64(int(v)) +} + +// cast value to integer for config values that are floats but are 
supposed to be +// integers according to the schema +// +// Needed because the default json unmarshaler for maps converts all numbers to floats +func castFloatConfigValuesToInt(config map[string]any, jsonSchema *jsonschema.Schema) error { + for k, v := range config { + // error because all config keys should be defined in schema too + fieldInfo, ok := jsonSchema.Properties[k] + if !ok { + return fmt.Errorf("%s is not defined as an input parameter for the template", k) + } + // skip non integer fields + if fieldInfo.Type != jsonschema.IntegerType { + continue + } + + // convert floating point type values to integer + switch floatVal := v.(type) { + case float32: + if !isIntegerValue(float64(floatVal)) { + return fmt.Errorf("expected %s to have integer value but it is %v", k, v) + } + config[k] = int(floatVal) + case float64: + if !isIntegerValue(floatVal) { + return fmt.Errorf("expected %s to have integer value but it is %v", k, v) + } + config[k] = int(floatVal) + } + } + return nil +} + +func assignDefaultConfigValues(config map[string]any, schema *jsonschema.Schema) error { + for k, v := range schema.Properties { + if _, ok := config[k]; ok { + continue + } + if v.Default == nil { + return fmt.Errorf("input parameter %s is not defined in config", k) + } + config[k] = v.Default + } + return nil +} + +func validateConfigValueTypes(config map[string]any, schema *jsonschema.Schema) error { + // validate types defined in config + for k, v := range config { + fieldInfo, ok := schema.Properties[k] + if !ok { + return fmt.Errorf("%s is not defined as an input parameter for the template", k) + } + err := validateType(v, fieldInfo.Type) + if err != nil { + return fmt.Errorf("incorrect type for %s. 
%w", k, err) + } + } + return nil +} + +func ReadSchema(path string) (*jsonschema.Schema, error) { + schemaBytes, err := os.ReadFile(path) + if err != nil { + return nil, err + } + schema := &jsonschema.Schema{} + err = json.Unmarshal(schemaBytes, schema) + if err != nil { + return nil, err + } + return schema, nil +} + +func ReadConfig(path string, jsonSchema *jsonschema.Schema) (map[string]any, error) { + // Read config file + var config map[string]any + b, err := os.ReadFile(path) + if err != nil { + return nil, err + } + err = json.Unmarshal(b, &config) + if err != nil { + return nil, err + } + + // Assign default value to any fields that do not have a value yet + err = assignDefaultConfigValues(config, jsonSchema) + if err != nil { + return nil, err + } + + // cast any fields that are supposed to be integers. The json unmarshalling + // for a generic map converts all numbers to floating point + err = castFloatConfigValuesToInt(config, jsonSchema) + if err != nil { + return nil, err + } + + // validate config according to schema + err = validateConfigValueTypes(config, jsonSchema) + if err != nil { + return nil, err + } + return config, nil +} diff --git a/libs/template/schema_test.go b/libs/template/schema_test.go new file mode 100644 index 000000000..ba30f81a9 --- /dev/null +++ b/libs/template/schema_test.go @@ -0,0 +1,274 @@ +package template + +import ( + "encoding/json" + "testing" + + "github.com/databricks/cli/libs/jsonschema" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testSchema(t *testing.T) *jsonschema.Schema { + schemaJson := `{ + "properties": { + "int_val": { + "type": "integer" + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string" + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + return &jsonSchema +} + +func TestTemplateSchemaIsInteger(t *testing.T) 
{ + assert.False(t, isIntegerValue(1.1)) + assert.False(t, isIntegerValue(0.1)) + assert.False(t, isIntegerValue(-0.1)) + + assert.True(t, isIntegerValue(-1.0)) + assert.True(t, isIntegerValue(0.0)) + assert.True(t, isIntegerValue(2.0)) +} + +func TestTemplateSchemaCastFloatToInt(t *testing.T) { + // define schema for config + jsonSchema := testSchema(t) + + // define the config + configJson := `{ + "int_val": 1, + "float_val": 2, + "bool_val": true, + "string_val": "main hoon na" + }` + var config map[string]any + err := json.Unmarshal([]byte(configJson), &config) + require.NoError(t, err) + + // assert types before casting, checking that the integer was indeed loaded + // as a floating point + assert.IsType(t, float64(0), config["int_val"]) + assert.IsType(t, float64(0), config["float_val"]) + assert.IsType(t, true, config["bool_val"]) + assert.IsType(t, "abc", config["string_val"]) + + err = castFloatConfigValuesToInt(config, jsonSchema) + require.NoError(t, err) + + // assert type after casting, that the float value was converted to an integer + // for int_val. 
+ assert.IsType(t, int(0), config["int_val"]) + assert.IsType(t, float64(0), config["float_val"]) + assert.IsType(t, true, config["bool_val"]) + assert.IsType(t, "abc", config["string_val"]) +} + +func TestTemplateSchemaCastFloatToIntFailsForUnknownTypes(t *testing.T) { + // define schema for config + schemaJson := `{ + "properties": { + "foo": { + "type": "integer" + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + + // define the config + configJson := `{ + "bar": true + }` + var config map[string]any + err = json.Unmarshal([]byte(configJson), &config) + require.NoError(t, err) + + err = castFloatConfigValuesToInt(config, &jsonSchema) + assert.ErrorContains(t, err, "bar is not defined as an input parameter for the template") +} + +func TestTemplateSchemaCastFloatToIntFailsWhenWithNonIntValues(t *testing.T) { + // define schema for config + schemaJson := `{ + "properties": { + "foo": { + "type": "integer" + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + + // define the config + configJson := `{ + "foo": 1.1 + }` + var config map[string]any + err = json.Unmarshal([]byte(configJson), &config) + require.NoError(t, err) + + err = castFloatConfigValuesToInt(config, &jsonSchema) + assert.ErrorContains(t, err, "expected foo to have integer value but it is 1.1") +} + +func TestTemplateSchemaValidateType(t *testing.T) { + // assert validation passing + err := validateType(int(0), jsonschema.IntegerType) + assert.NoError(t, err) + err = validateType(int32(1), jsonschema.IntegerType) + assert.NoError(t, err) + err = validateType(int64(1), jsonschema.IntegerType) + assert.NoError(t, err) + + err = validateType(float32(1.1), jsonschema.NumberType) + assert.NoError(t, err) + err = validateType(float64(1.2), jsonschema.NumberType) + assert.NoError(t, err) + err = validateType(int(1), jsonschema.NumberType) + 
assert.NoError(t, err) + + err = validateType(false, jsonschema.BooleanType) + assert.NoError(t, err) + + err = validateType("abc", jsonschema.StringType) + assert.NoError(t, err) + + // assert validation failing for integers + err = validateType(float64(1.2), jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is 1.2") + err = validateType(true, jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is true") + err = validateType("abc", jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") + + // assert validation failing for floats + err = validateType(true, jsonschema.NumberType) + assert.ErrorContains(t, err, "expected type float, but value is true") + err = validateType("abc", jsonschema.NumberType) + assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") + + // assert validation failing for boolean + err = validateType(int(1), jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType(float64(1), jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType("abc", jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") + + // assert validation failing for string + err = validateType(int(1), jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(float64(1), jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(false, jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is false") +} + +func TestTemplateSchemaValidateConfig(t *testing.T) { + // define schema for config + jsonSchema := testSchema(t) + + // define the config + config := map[string]any{ + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + 
"string_val": "abc", + } + + err := validateConfigValueTypes(config, jsonSchema) + assert.NoError(t, err) +} + +func TestTemplateSchemaValidateConfigFailsForUnknownField(t *testing.T) { + // define schema for config + jsonSchema := testSchema(t) + + // define the config + config := map[string]any{ + "foo": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abc", + } + + err := validateConfigValueTypes(config, jsonSchema) + assert.ErrorContains(t, err, "foo is not defined as an input parameter for the template") +} + +func TestTemplateSchemaValidateConfigFailsForWhenIncorrectTypes(t *testing.T) { + // define schema for config + jsonSchema := testSchema(t) + + // define the config + config := map[string]any{ + "int_val": 1, + "float_val": 1.1, + "bool_val": "true", + "string_val": "abc", + } + + err := validateConfigValueTypes(config, jsonSchema) + assert.ErrorContains(t, err, "incorrect type for bool_val. expected type boolean, but value is \"true\"") +} + +func TestTemplateSchemaValidateConfigFailsForWhenMissingInputParams(t *testing.T) { + // define schema for config + schemaJson := `{ + "properties": { + "int_val": { + "type": "integer" + }, + "string_val": { + "type": "string" + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + + // define the config + config := map[string]any{ + "int_val": 1, + } + + err = assignDefaultConfigValues(config, &jsonSchema) + assert.ErrorContains(t, err, "input parameter string_val is not defined in config") +} + +func TestTemplateDefaultAssignment(t *testing.T) { + // define schema for config + schemaJson := `{ + "properties": { + "foo": { + "type": "integer", + "default": 1 + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + + // define the config + config := map[string]any{} + + err = assignDefaultConfigValues(config, &jsonSchema) + assert.NoError(t, err) + 
assert.Equal(t, 1.0, config["foo"]) +} diff --git a/libs/template/validators.go b/libs/template/validators.go new file mode 100644 index 000000000..0ae41e461 --- /dev/null +++ b/libs/template/validators.go @@ -0,0 +1,60 @@ +package template + +import ( + "fmt" + "reflect" + + "github.com/databricks/cli/libs/jsonschema" + "golang.org/x/exp/slices" +) + +type validator func(v any) error + +func validateType(v any, fieldType jsonschema.Type) error { + validateFunc, ok := validators[fieldType] + if !ok { + return nil + } + return validateFunc(v) +} + +func validateString(v any) error { + if _, ok := v.(string); !ok { + return fmt.Errorf("expected type string, but value is %#v", v) + } + return nil +} + +func validateBoolean(v any) error { + if _, ok := v.(bool); !ok { + return fmt.Errorf("expected type boolean, but value is %#v", v) + } + return nil +} + +func validateNumber(v any) error { + if !slices.Contains([]reflect.Kind{reflect.Float32, reflect.Float64, reflect.Int, + reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, + reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64}, + reflect.TypeOf(v).Kind()) { + return fmt.Errorf("expected type float, but value is %#v", v) + } + return nil +} + +func validateInteger(v any) error { + if !slices.Contains([]reflect.Kind{reflect.Int, reflect.Int8, reflect.Int16, + reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, + reflect.Uint32, reflect.Uint64}, + reflect.TypeOf(v).Kind()) { + return fmt.Errorf("expected type integer, but value is %#v", v) + } + return nil +} + +var validators map[jsonschema.Type]validator = map[jsonschema.Type]validator{ + jsonschema.StringType: validateString, + jsonschema.BooleanType: validateBoolean, + jsonschema.IntegerType: validateInteger, + jsonschema.NumberType: validateNumber, +} diff --git a/libs/template/validators_test.go b/libs/template/validators_test.go new file mode 100644 index 000000000..f0cbf8a14 --- /dev/null +++ 
b/libs/template/validators_test.go @@ -0,0 +1,76 @@ +package template + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidatorString(t *testing.T) { + err := validateString("abc") + assert.NoError(t, err) + + err = validateString(1) + assert.ErrorContains(t, err, "expected type string, but value is 1") + + err = validateString(true) + assert.ErrorContains(t, err, "expected type string, but value is true") + + err = validateString("false") + assert.NoError(t, err) +} + +func TestValidatorBoolean(t *testing.T) { + err := validateBoolean(true) + assert.NoError(t, err) + + err = validateBoolean(1) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + + err = validateBoolean("abc") + assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") + + err = validateBoolean("false") + assert.ErrorContains(t, err, "expected type boolean, but value is \"false\"") +} + +func TestValidatorNumber(t *testing.T) { + err := validateNumber(true) + assert.ErrorContains(t, err, "expected type float, but value is true") + + err = validateNumber(int32(1)) + require.NoError(t, err) + + err = validateNumber(int64(1)) + require.NoError(t, err) + + err = validateNumber(float32(1)) + assert.NoError(t, err) + + err = validateNumber(float64(1)) + assert.NoError(t, err) + + err = validateNumber("abc") + assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") +} + +func TestValidatorInt(t *testing.T) { + err := validateInteger(true) + assert.ErrorContains(t, err, "expected type integer, but value is true") + + err = validateInteger(int32(1)) + assert.NoError(t, err) + + err = validateInteger(int64(1)) + assert.NoError(t, err) + + err = validateInteger(float32(1)) + assert.ErrorContains(t, err, "expected type integer, but value is 1") + + err = validateInteger(float64(1)) + assert.ErrorContains(t, err, "expected type integer, but value is 1") + + err = 
validateInteger("abc") + assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") +} From d9ab465ff96fd56d62e05712a7693347c19ba9dc Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Wed, 2 Aug 2023 11:43:42 +0200 Subject: [PATCH 038/139] Infer host from profile during login (#629) ## Changes A pretty annoying part of the current CLI experience is that when logging in with `databricks auth login`, you always need to type the name of the host. This seems unnecessary if you have already logged into a host before, since the CLI can read the previous host from your `.databrickscfg` file. This change handles this case by setting the host if unspecified to the host in the corresponding profile. Combined with autocomplete, this makes the login process simple: ``` databricks auth login --profile prof ``` ## Tests Logged in to an existing profile by running the above command (but for a real profile I had). --- cmd/auth/login.go | 47 ++++++++++++++++++++++++++++++----------------- 1 file changed, 30 insertions(+), 17 deletions(-) diff --git a/cmd/auth/login.go b/cmd/auth/login.go index fcb0e0ddb..e248118ae 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -43,8 +43,36 @@ func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() + + var profileName string + profileFlag := cmd.Flag("profile") + if profileFlag != nil && profileFlag.Value.String() != "" { + profileName = profileFlag.Value.String() + } else { + prompt := cmdio.Prompt(ctx) + prompt.Label = "Databricks Profile Name" + prompt.Default = persistentAuth.ProfileName() + prompt.AllowEdit = true + profile, err := prompt.Run() + if err != nil { + return err + } + profileName = profile + } + + // If the chosen profile has a hostname and the user hasn't specified a host, infer the host from the profile. 
+ _, profiles, err := databrickscfg.LoadProfiles(databrickscfg.DefaultPath, func(p databrickscfg.Profile) bool { + return p.Name == profileName + }) + if err != nil { + return err + } if persistentAuth.Host == "" { - configureHost(ctx, persistentAuth, args, 0) + if len(profiles) > 0 && profiles[0].Host != "" { + persistentAuth.Host = profiles[0].Host + } else { + configureHost(ctx, persistentAuth, args, 0) + } } defer persistentAuth.Close() @@ -66,22 +94,7 @@ func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { ctx, cancel := context.WithTimeout(ctx, loginTimeout) defer cancel() - var profileName string - profileFlag := cmd.Flag("profile") - if profileFlag != nil && profileFlag.Value.String() != "" { - profileName = profileFlag.Value.String() - } else { - prompt := cmdio.Prompt(ctx) - prompt.Label = "Databricks Profile Name" - prompt.Default = persistentAuth.ProfileName() - prompt.AllowEdit = true - profile, err := prompt.Run() - if err != nil { - return err - } - profileName = profile - } - err := persistentAuth.Challenge(ctx) + err = persistentAuth.Challenge(ctx) if err != nil { return err } From 31b178ad6cbc8c468132d94dcd6e18004de04936 Mon Sep 17 00:00:00 2001 From: Kartik Gupta <88345179+kartikgupta-db@users.noreply.github.com> Date: Wed, 2 Aug 2023 12:18:19 +0200 Subject: [PATCH 039/139] Add DATABRICKS_BUNDLE_INCLUDE_PATHS to specify include paths through env vars (#591) ## Changes * This PR adds `DATABRICKS_BUNDLE_INCLUDE_PATHS` environment variable, so that we can specify including bundle config files, which we do not want to commit. These could potentially be local dev overrides or overrides by our tools - like the VS Code extension * We always add these include paths to the "include" field. 
## Tests * [x] Unit tests --- .../config/mutator/process_root_includes.go | 24 ++++++++++++ .../mutator/process_root_includes_test.go | 39 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index f3717ce01..1b0faa77e 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -3,6 +3,7 @@ package mutator import ( "context" "fmt" + "os" "path/filepath" "strings" @@ -11,6 +12,17 @@ import ( "golang.org/x/exp/slices" ) +const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES" + +// Get extra include paths from environment variable +func GetExtraIncludePaths() []string { + value, exists := os.LookupEnv(ExtraIncludePathsKey) + if !exists { + return nil + } + return strings.Split(value, string(os.PathListSeparator)) +} + type processRootIncludes struct{} // ProcessRootIncludes expands the patterns in the configuration's include list @@ -37,6 +49,18 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error // This is stored in the bundle configuration for observability. var files []string + // Converts extra include paths from environment variable to relative paths + for _, extraIncludePath := range GetExtraIncludePaths() { + if filepath.IsAbs(extraIncludePath) { + rel, err := filepath.Rel(b.Config.Path, extraIncludePath) + if err != nil { + return fmt.Errorf("unable to include file '%s': %w", extraIncludePath, err) + } + extraIncludePath = rel + } + b.Config.Include = append(b.Config.Include, extraIncludePath) + } + // For each glob, find all files to load. // Ordering of the list of globs is maintained in the output. // For matches that appear in multiple globs, only the first is kept. 
diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 9ca5335ac..449e3a02c 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -2,7 +2,9 @@ package mutator_test import ( "context" + "fmt" "os" + "path" "path/filepath" "runtime" "testing" @@ -122,3 +124,40 @@ func TestProcessRootIncludesNotExists(t *testing.T) { require.Error(t, err) assert.Contains(t, err.Error(), "notexist.yml defined in 'include' section does not match any files") } + +func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + os.Setenv(mutator.ExtraIncludePathsKey, path.Join(rootPath, testYamlName)) + t.Cleanup(func() { + os.Unsetenv(mutator.ExtraIncludePathsKey) + }) + + bundle := &bundle.Bundle{ + Config: config.Root{ + Path: rootPath, + }, + } + + err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Contains(t, bundle.Config.Include, testYamlName) +} + +func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { + rootPath := t.TempDir() + testYamlName := "extra_include_path.yml" + touch(t, rootPath, testYamlName) + t.Setenv(mutator.ExtraIncludePathsKey, fmt.Sprintf("%s%s%s", path.Join(rootPath, testYamlName), string(os.PathListSeparator), path.Join(rootPath, testYamlName))) + + bundle := &bundle.Bundle{ + Config: config.Root{ + Path: rootPath, + }, + } + + err := mutator.ProcessRootIncludes().Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Equal(t, []string{testYamlName}, bundle.Config.Include) +} From 3140a8feef26ccccb7197f481e0fb9a7ef1fc3d3 Mon Sep 17 00:00:00 2001 From: Kartik Gupta <88345179+kartikgupta-db@users.noreply.github.com> Date: Wed, 2 Aug 2023 19:22:47 +0200 Subject: [PATCH 040/139] Initialise a empty default bundle if BUNDLE_ROOT and 
DATABRICKS_BUNDLE_INCLUDES env vars are present (#604) ## Changes ## Tests --- bundle/bundle.go | 17 ++++++++ .../config/mutator/process_root_includes.go | 4 +- .../mutator/process_root_includes_test.go | 6 +-- bundle/root_test.go | 43 +++++++++++++++++++ 4 files changed, 64 insertions(+), 6 deletions(-) diff --git a/bundle/bundle.go b/bundle/bundle.go index 81fdfd4a8..0147883ca 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -43,10 +43,27 @@ type Bundle struct { AutoApprove bool } +const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES" + func Load(path string) (*Bundle, error) { bundle := &Bundle{} + stat, err := os.Stat(path) + if err != nil { + return nil, err + } configFile, err := config.FileNames.FindInPath(path) if err != nil { + _, hasIncludePathEnv := os.LookupEnv(ExtraIncludePathsKey) + _, hasBundleRootEnv := os.LookupEnv(envBundleRoot) + if hasIncludePathEnv && hasBundleRootEnv && stat.IsDir() { + bundle.Config = config.Root{ + Path: path, + Bundle: config.Bundle{ + Name: filepath.Base(path), + }, + } + return bundle, nil + } return nil, err } err = bundle.Config.Load(configFile) diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 1b0faa77e..c2dffc6ee 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -12,11 +12,9 @@ import ( "golang.org/x/exp/slices" ) -const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES" - // Get extra include paths from environment variable func GetExtraIncludePaths() []string { - value, exists := os.LookupEnv(ExtraIncludePathsKey) + value, exists := os.LookupEnv(bundle.ExtraIncludePathsKey) if !exists { return nil } diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 449e3a02c..1ce094bc3 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ 
b/bundle/config/mutator/process_root_includes_test.go @@ -129,9 +129,9 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { rootPath := t.TempDir() testYamlName := "extra_include_path.yml" touch(t, rootPath, testYamlName) - os.Setenv(mutator.ExtraIncludePathsKey, path.Join(rootPath, testYamlName)) + os.Setenv(bundle.ExtraIncludePathsKey, path.Join(rootPath, testYamlName)) t.Cleanup(func() { - os.Unsetenv(mutator.ExtraIncludePathsKey) + os.Unsetenv(bundle.ExtraIncludePathsKey) }) bundle := &bundle.Bundle{ @@ -149,7 +149,7 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { rootPath := t.TempDir() testYamlName := "extra_include_path.yml" touch(t, rootPath, testYamlName) - t.Setenv(mutator.ExtraIncludePathsKey, fmt.Sprintf("%s%s%s", path.Join(rootPath, testYamlName), string(os.PathListSeparator), path.Join(rootPath, testYamlName))) + t.Setenv(bundle.ExtraIncludePathsKey, fmt.Sprintf("%s%s%s", path.Join(rootPath, testYamlName), string(os.PathListSeparator), path.Join(rootPath, testYamlName))) bundle := &bundle.Bundle{ Config: config.Root{ diff --git a/bundle/root_test.go b/bundle/root_test.go index 2f8304921..e85c4fdcb 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -102,3 +103,45 @@ func TestRootLookupError(t *testing.T) { _, err := mustGetRoot() require.ErrorContains(t, err, "unable to locate bundle root") } + +func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { + chdir(t, filepath.Join(".", "tests", "basic")) + t.Setenv(ExtraIncludePathsKey, "test") + + bundle, err := MustLoad() + assert.NoError(t, err) + assert.Equal(t, "basic", bundle.Config.Bundle.Name) + + cwd, err := os.Getwd() + assert.NoError(t, err) + assert.Equal(t, cwd, bundle.Config.Path) +} + +func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { + dir := t.TempDir() + chdir(t, 
dir) + t.Setenv(envBundleRoot, dir) + t.Setenv(ExtraIncludePathsKey, "test") + + bundle, err := MustLoad() + assert.NoError(t, err) + assert.Equal(t, dir, bundle.Config.Path) +} + +func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) { + dir := t.TempDir() + chdir(t, dir) + t.Setenv(ExtraIncludePathsKey, "test") + + _, err := MustLoad() + assert.Error(t, err) +} + +func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) { + dir := t.TempDir() + chdir(t, dir) + t.Setenv(envBundleRoot, dir) + + _, err := MustLoad() + assert.Error(t, err) +} From ce9c9148c96449a57b476659311c33bb843adc67 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 3 Aug 2023 13:20:30 +0200 Subject: [PATCH 041/139] Regenerate bundle resource structs from latest terraform provider (#633) ## Changes This PR: 1. Regenerates the terraform provider structs based off the latest terraform provider version: 1.22.0 2. Adds a debug launch configuration for regenerating the schema ## Tests Existing unit tests --- bundle/internal/tf/codegen/.gitignore | 1 + bundle/internal/tf/schema/data_source_job.go | 131 +++++++++++------ .../tf/schema/data_source_metastore.go | 30 ++++ .../tf/schema/data_source_metastores.go | 8 ++ .../tf/schema/data_source_sql_warehouse.go | 2 +- bundle/internal/tf/schema/data_sources.go | 4 + .../resource_access_control_rule_set.go | 15 ++ bundle/internal/tf/schema/resource_group.go | 1 + bundle/internal/tf/schema/resource_job.go | 132 ++++++++++++------ .../tf/schema/resource_service_principal.go | 1 + bundle/internal/tf/schema/resource_user.go | 1 + bundle/internal/tf/schema/resources.go | 2 + 12 files changed, 241 insertions(+), 87 deletions(-) create mode 100644 bundle/internal/tf/schema/data_source_metastore.go create mode 100644 bundle/internal/tf/schema/data_source_metastores.go create mode 100644 bundle/internal/tf/schema/resource_access_control_rule_set.go diff --git 
a/bundle/internal/tf/codegen/.gitignore b/bundle/internal/tf/codegen/.gitignore index d59e6e95f..72f05fc49 100644 --- a/bundle/internal/tf/codegen/.gitignore +++ b/bundle/internal/tf/codegen/.gitignore @@ -1,2 +1,3 @@ /codegen /tmp +/.vscode diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go index a633bd3a3..6d2d1aa9b 100644 --- a/bundle/internal/tf/schema/data_source_job.go +++ b/bundle/internal/tf/schema/data_source_job.go @@ -25,19 +25,37 @@ type DataSourceJobJobSettingsSettingsDbtTask struct { } type DataSourceJobJobSettingsSettingsEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsGitSourceJobSource struct { + DirtyState string `json:"dirty_state,omitempty"` + ImportFromGitBranch string `json:"import_from_git_branch"` + JobConfigPath string `json:"job_config_path"` } type DataSourceJobJobSettingsSettingsGitSource struct { - Branch string `json:"branch,omitempty"` - Commit string `json:"commit,omitempty"` - Provider string `json:"provider,omitempty"` - Tag string `json:"tag,omitempty"` - Url string `json:"url"` + Branch string `json:"branch,omitempty"` + Commit string `json:"commit,omitempty"` + Provider string `json:"provider,omitempty"` + Tag string `json:"tag,omitempty"` + Url string `json:"url"` + JobSource 
*DataSourceJobJobSettingsSettingsGitSourceJobSource `json:"job_source,omitempty"` +} + +type DataSourceJobJobSettingsSettingsHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type DataSourceJobJobSettingsSettingsHealth struct { + Rules []DataSourceJobJobSettingsSettingsHealthRules `json:"rules,omitempty"` } type DataSourceJobJobSettingsSettingsJobClusterNewClusterAutoscale struct { @@ -384,7 +402,8 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct { } type DataSourceJobJobSettingsSettingsPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type DataSourceJobJobSettingsSettingsPythonWheelTask struct { @@ -445,11 +464,22 @@ type DataSourceJobJobSettingsSettingsTaskDependsOn struct { } type DataSourceJobJobSettingsSettingsTaskEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type DataSourceJobJobSettingsSettingsTaskHealth struct { + Rules []DataSourceJobJobSettingsSettingsTaskHealthRules `json:"rules,omitempty"` } type 
DataSourceJobJobSettingsSettingsTaskLibraryCran struct { @@ -634,8 +664,15 @@ type DataSourceJobJobSettingsSettingsTaskNotebookTask struct { Source string `json:"source,omitempty"` } +type DataSourceJobJobSettingsSettingsTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + type DataSourceJobJobSettingsSettingsTaskPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct { @@ -702,29 +739,31 @@ type DataSourceJobJobSettingsSettingsTaskSqlTask struct { } type DataSourceJobJobSettingsSettingsTask struct { - ComputeKey string `json:"compute_key,omitempty"` - Description string `json:"description,omitempty"` - ExistingClusterId string `json:"existing_cluster_id,omitempty"` - JobClusterKey string `json:"job_cluster_key,omitempty"` - MaxRetries int `json:"max_retries,omitempty"` - MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` - RunIf string `json:"run_if,omitempty"` - TaskKey string `json:"task_key,omitempty"` - TimeoutSeconds int `json:"timeout_seconds,omitempty"` - ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"` - DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"` - DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"` - EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"` - Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"` - NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"` - 
NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"` - PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"` - PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"` - SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"` - SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"` - SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` - SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"` + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *DataSourceJobJobSettingsSettingsTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *DataSourceJobJobSettingsSettingsTaskDbtTask `json:"dbt_task,omitempty"` + DependsOn []DataSourceJobJobSettingsSettingsTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *DataSourceJobJobSettingsSettingsTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *DataSourceJobJobSettingsSettingsTaskHealth `json:"health,omitempty"` + Library []DataSourceJobJobSettingsSettingsTaskLibrary `json:"library,omitempty"` + NewCluster *DataSourceJobJobSettingsSettingsTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *DataSourceJobJobSettingsSettingsTaskNotebookTask `json:"notebook_task,omitempty"` + 
NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *DataSourceJobJobSettingsSettingsTaskSqlTask `json:"sql_task,omitempty"` } type DataSourceJobJobSettingsSettingsTriggerFileArrival struct { @@ -738,6 +777,10 @@ type DataSourceJobJobSettingsSettingsTrigger struct { FileArrival *DataSourceJobJobSettingsSettingsTriggerFileArrival `json:"file_arrival,omitempty"` } +type DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id"` +} + type DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure struct { Id string `json:"id"` } @@ -751,9 +794,10 @@ type DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess struct { } type DataSourceJobJobSettingsSettingsWebhookNotifications struct { - OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` - OnStart []DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` - OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` + OnDurationWarningThresholdExceeded []DataSourceJobJobSettingsSettingsWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []DataSourceJobJobSettingsSettingsWebhookNotificationsOnFailure `json:"on_failure,omitempty"` + OnStart 
[]DataSourceJobJobSettingsSettingsWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []DataSourceJobJobSettingsSettingsWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } type DataSourceJobJobSettingsSettings struct { @@ -771,6 +815,7 @@ type DataSourceJobJobSettingsSettings struct { DbtTask *DataSourceJobJobSettingsSettingsDbtTask `json:"dbt_task,omitempty"` EmailNotifications *DataSourceJobJobSettingsSettingsEmailNotifications `json:"email_notifications,omitempty"` GitSource *DataSourceJobJobSettingsSettingsGitSource `json:"git_source,omitempty"` + Health *DataSourceJobJobSettingsSettingsHealth `json:"health,omitempty"` JobCluster []DataSourceJobJobSettingsSettingsJobCluster `json:"job_cluster,omitempty"` Library []DataSourceJobJobSettingsSettingsLibrary `json:"library,omitempty"` NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"` diff --git a/bundle/internal/tf/schema/data_source_metastore.go b/bundle/internal/tf/schema/data_source_metastore.go new file mode 100644 index 000000000..dd14be81c --- /dev/null +++ b/bundle/internal/tf/schema/data_source_metastore.go @@ -0,0 +1,30 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type DataSourceMetastoreMetastoreInfo struct { + Cloud string `json:"cloud,omitempty"` + CreatedAt int `json:"created_at,omitempty"` + CreatedBy string `json:"created_by,omitempty"` + DefaultDataAccessConfigId string `json:"default_data_access_config_id,omitempty"` + DeltaSharingOrganizationName string `json:"delta_sharing_organization_name,omitempty"` + DeltaSharingRecipientTokenLifetimeInSeconds int `json:"delta_sharing_recipient_token_lifetime_in_seconds,omitempty"` + DeltaSharingScope string `json:"delta_sharing_scope,omitempty"` + GlobalMetastoreId string `json:"global_metastore_id,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name,omitempty"` + Owner string `json:"owner,omitempty"` + PrivilegeModelVersion string `json:"privilege_model_version,omitempty"` + Region string `json:"region,omitempty"` + StorageRoot string `json:"storage_root,omitempty"` + StorageRootCredentialId string `json:"storage_root_credential_id,omitempty"` + StorageRootCredentialName string `json:"storage_root_credential_name,omitempty"` + UpdatedAt int `json:"updated_at,omitempty"` + UpdatedBy string `json:"updated_by,omitempty"` +} + +type DataSourceMetastore struct { + Id string `json:"id,omitempty"` + MetastoreId string `json:"metastore_id"` + MetastoreInfo *DataSourceMetastoreMetastoreInfo `json:"metastore_info,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_metastores.go b/bundle/internal/tf/schema/data_source_metastores.go new file mode 100644 index 000000000..c2b6854ee --- /dev/null +++ b/bundle/internal/tf/schema/data_source_metastores.go @@ -0,0 +1,8 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. 
+ +package schema + +type DataSourceMetastores struct { + Id string `json:"id,omitempty"` + Ids map[string]string `json:"ids,omitempty"` +} diff --git a/bundle/internal/tf/schema/data_source_sql_warehouse.go b/bundle/internal/tf/schema/data_source_sql_warehouse.go index f90cc9dd6..218591d09 100644 --- a/bundle/internal/tf/schema/data_source_sql_warehouse.go +++ b/bundle/internal/tf/schema/data_source_sql_warehouse.go @@ -29,7 +29,7 @@ type DataSourceSqlWarehouse struct { DataSourceId string `json:"data_source_id,omitempty"` EnablePhoton bool `json:"enable_photon,omitempty"` EnableServerlessCompute bool `json:"enable_serverless_compute,omitempty"` - Id string `json:"id"` + Id string `json:"id,omitempty"` InstanceProfileArn string `json:"instance_profile_arn,omitempty"` JdbcUrl string `json:"jdbc_url,omitempty"` MaxNumClusters int `json:"max_num_clusters,omitempty"` diff --git a/bundle/internal/tf/schema/data_sources.go b/bundle/internal/tf/schema/data_sources.go index 6fbcf680b..79658298f 100644 --- a/bundle/internal/tf/schema/data_sources.go +++ b/bundle/internal/tf/schema/data_sources.go @@ -18,6 +18,8 @@ type DataSources struct { InstancePool map[string]*DataSourceInstancePool `json:"databricks_instance_pool,omitempty"` Job map[string]*DataSourceJob `json:"databricks_job,omitempty"` Jobs map[string]*DataSourceJobs `json:"databricks_jobs,omitempty"` + Metastore map[string]*DataSourceMetastore `json:"databricks_metastore,omitempty"` + Metastores map[string]*DataSourceMetastores `json:"databricks_metastores,omitempty"` MwsCredentials map[string]*DataSourceMwsCredentials `json:"databricks_mws_credentials,omitempty"` MwsWorkspaces map[string]*DataSourceMwsWorkspaces `json:"databricks_mws_workspaces,omitempty"` NodeType map[string]*DataSourceNodeType `json:"databricks_node_type,omitempty"` @@ -55,6 +57,8 @@ func NewDataSources() *DataSources { InstancePool: make(map[string]*DataSourceInstancePool), Job: make(map[string]*DataSourceJob), Jobs: 
make(map[string]*DataSourceJobs), + Metastore: make(map[string]*DataSourceMetastore), + Metastores: make(map[string]*DataSourceMetastores), MwsCredentials: make(map[string]*DataSourceMwsCredentials), MwsWorkspaces: make(map[string]*DataSourceMwsWorkspaces), NodeType: make(map[string]*DataSourceNodeType), diff --git a/bundle/internal/tf/schema/resource_access_control_rule_set.go b/bundle/internal/tf/schema/resource_access_control_rule_set.go new file mode 100644 index 000000000..775c0708b --- /dev/null +++ b/bundle/internal/tf/schema/resource_access_control_rule_set.go @@ -0,0 +1,15 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type ResourceAccessControlRuleSetGrantRules struct { + Principals []string `json:"principals,omitempty"` + Role string `json:"role"` +} + +type ResourceAccessControlRuleSet struct { + Etag string `json:"etag,omitempty"` + Id string `json:"id,omitempty"` + Name string `json:"name"` + GrantRules []ResourceAccessControlRuleSetGrantRules `json:"grant_rules,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_group.go b/bundle/internal/tf/schema/resource_group.go index 252d20874..7d7860f5d 100644 --- a/bundle/internal/tf/schema/resource_group.go +++ b/bundle/internal/tf/schema/resource_group.go @@ -3,6 +3,7 @@ package schema type ResourceGroup struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` AllowClusterCreate bool `json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` DatabricksSqlAccess bool `json:"databricks_sql_access,omitempty"` diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index e3137ea15..77b681ee5 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -25,19 +25,37 @@ type ResourceJobDbtTask struct { } type ResourceJobEmailNotifications struct { - AlertOnLastAttempt bool 
`json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type ResourceJobGitSourceJobSource struct { + DirtyState string `json:"dirty_state,omitempty"` + ImportFromGitBranch string `json:"import_from_git_branch"` + JobConfigPath string `json:"job_config_path"` } type ResourceJobGitSource struct { - Branch string `json:"branch,omitempty"` - Commit string `json:"commit,omitempty"` - Provider string `json:"provider,omitempty"` - Tag string `json:"tag,omitempty"` - Url string `json:"url"` + Branch string `json:"branch,omitempty"` + Commit string `json:"commit,omitempty"` + Provider string `json:"provider,omitempty"` + Tag string `json:"tag,omitempty"` + Url string `json:"url"` + JobSource *ResourceJobGitSourceJobSource `json:"job_source,omitempty"` +} + +type ResourceJobHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type ResourceJobHealth struct { + Rules []ResourceJobHealthRules `json:"rules,omitempty"` } type ResourceJobJobClusterNewClusterAutoscale struct { @@ -384,7 +402,8 @@ type ResourceJobNotificationSettings struct { } type ResourceJobPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type ResourceJobPythonWheelTask struct { @@ -445,11 +464,22 @@ type 
ResourceJobTaskDependsOn struct { } type ResourceJobTaskEmailNotifications struct { - AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` - NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` - OnFailure []string `json:"on_failure,omitempty"` - OnStart []string `json:"on_start,omitempty"` - OnSuccess []string `json:"on_success,omitempty"` + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` + OnDurationWarningThresholdExceeded []string `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []string `json:"on_failure,omitempty"` + OnStart []string `json:"on_start,omitempty"` + OnSuccess []string `json:"on_success,omitempty"` +} + +type ResourceJobTaskHealthRules struct { + Metric string `json:"metric,omitempty"` + Op string `json:"op,omitempty"` + Value int `json:"value,omitempty"` +} + +type ResourceJobTaskHealth struct { + Rules []ResourceJobTaskHealthRules `json:"rules,omitempty"` } type ResourceJobTaskLibraryCran struct { @@ -634,8 +664,15 @@ type ResourceJobTaskNotebookTask struct { Source string `json:"source,omitempty"` } +type ResourceJobTaskNotificationSettings struct { + AlertOnLastAttempt bool `json:"alert_on_last_attempt,omitempty"` + NoAlertForCanceledRuns bool `json:"no_alert_for_canceled_runs,omitempty"` + NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` +} + type ResourceJobTaskPipelineTask struct { - PipelineId string `json:"pipeline_id"` + FullRefresh bool `json:"full_refresh,omitempty"` + PipelineId string `json:"pipeline_id"` } type ResourceJobTaskPythonWheelTask struct { @@ -702,29 +739,31 @@ type ResourceJobTaskSqlTask struct { } type ResourceJobTask struct { - ComputeKey string `json:"compute_key,omitempty"` - Description string `json:"description,omitempty"` - ExistingClusterId string `json:"existing_cluster_id,omitempty"` - JobClusterKey string `json:"job_cluster_key,omitempty"` - 
MaxRetries int `json:"max_retries,omitempty"` - MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` - RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` - RunIf string `json:"run_if,omitempty"` - TaskKey string `json:"task_key,omitempty"` - TimeoutSeconds int `json:"timeout_seconds,omitempty"` - ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"` - DbtTask *ResourceJobTaskDbtTask `json:"dbt_task,omitempty"` - DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"` - EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"` - Library []ResourceJobTaskLibrary `json:"library,omitempty"` - NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"` - NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"` - PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"` - PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"` - SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"` - SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"` - SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` - SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"` + ComputeKey string `json:"compute_key,omitempty"` + Description string `json:"description,omitempty"` + ExistingClusterId string `json:"existing_cluster_id,omitempty"` + JobClusterKey string `json:"job_cluster_key,omitempty"` + MaxRetries int `json:"max_retries,omitempty"` + MinRetryIntervalMillis int `json:"min_retry_interval_millis,omitempty"` + RetryOnTimeout bool `json:"retry_on_timeout,omitempty"` + RunIf string `json:"run_if,omitempty"` + TaskKey string `json:"task_key,omitempty"` + TimeoutSeconds int `json:"timeout_seconds,omitempty"` + ConditionTask *ResourceJobTaskConditionTask `json:"condition_task,omitempty"` + DbtTask *ResourceJobTaskDbtTask 
`json:"dbt_task,omitempty"` + DependsOn []ResourceJobTaskDependsOn `json:"depends_on,omitempty"` + EmailNotifications *ResourceJobTaskEmailNotifications `json:"email_notifications,omitempty"` + Health *ResourceJobTaskHealth `json:"health,omitempty"` + Library []ResourceJobTaskLibrary `json:"library,omitempty"` + NewCluster *ResourceJobTaskNewCluster `json:"new_cluster,omitempty"` + NotebookTask *ResourceJobTaskNotebookTask `json:"notebook_task,omitempty"` + NotificationSettings *ResourceJobTaskNotificationSettings `json:"notification_settings,omitempty"` + PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"` + PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"` + SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"` + SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` + SqlTask *ResourceJobTaskSqlTask `json:"sql_task,omitempty"` } type ResourceJobTriggerFileArrival struct { @@ -738,6 +777,10 @@ type ResourceJobTrigger struct { FileArrival *ResourceJobTriggerFileArrival `json:"file_arrival,omitempty"` } +type ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded struct { + Id string `json:"id"` +} + type ResourceJobWebhookNotificationsOnFailure struct { Id string `json:"id"` } @@ -751,13 +794,15 @@ type ResourceJobWebhookNotificationsOnSuccess struct { } type ResourceJobWebhookNotifications struct { - OnFailure []ResourceJobWebhookNotificationsOnFailure `json:"on_failure,omitempty"` - OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"` - OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"` + OnDurationWarningThresholdExceeded []ResourceJobWebhookNotificationsOnDurationWarningThresholdExceeded `json:"on_duration_warning_threshold_exceeded,omitempty"` + OnFailure []ResourceJobWebhookNotificationsOnFailure 
`json:"on_failure,omitempty"` + OnStart []ResourceJobWebhookNotificationsOnStart `json:"on_start,omitempty"` + OnSuccess []ResourceJobWebhookNotificationsOnSuccess `json:"on_success,omitempty"` } type ResourceJob struct { AlwaysRunning bool `json:"always_running,omitempty"` + ControlRunState bool `json:"control_run_state,omitempty"` ExistingClusterId string `json:"existing_cluster_id,omitempty"` Format string `json:"format,omitempty"` Id string `json:"id,omitempty"` @@ -774,6 +819,7 @@ type ResourceJob struct { DbtTask *ResourceJobDbtTask `json:"dbt_task,omitempty"` EmailNotifications *ResourceJobEmailNotifications `json:"email_notifications,omitempty"` GitSource *ResourceJobGitSource `json:"git_source,omitempty"` + Health *ResourceJobHealth `json:"health,omitempty"` JobCluster []ResourceJobJobCluster `json:"job_cluster,omitempty"` Library []ResourceJobLibrary `json:"library,omitempty"` NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"` diff --git a/bundle/internal/tf/schema/resource_service_principal.go b/bundle/internal/tf/schema/resource_service_principal.go index bdbce2278..5e9943a13 100644 --- a/bundle/internal/tf/schema/resource_service_principal.go +++ b/bundle/internal/tf/schema/resource_service_principal.go @@ -3,6 +3,7 @@ package schema type ResourceServicePrincipal struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` Active bool `json:"active,omitempty"` AllowClusterCreate bool `json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` diff --git a/bundle/internal/tf/schema/resource_user.go b/bundle/internal/tf/schema/resource_user.go index b96440934..2fe57b8b0 100644 --- a/bundle/internal/tf/schema/resource_user.go +++ b/bundle/internal/tf/schema/resource_user.go @@ -3,6 +3,7 @@ package schema type ResourceUser struct { + AclPrincipalId string `json:"acl_principal_id,omitempty"` Active bool `json:"active,omitempty"` AllowClusterCreate bool 
`json:"allow_cluster_create,omitempty"` AllowInstancePoolCreate bool `json:"allow_instance_pool_create,omitempty"` diff --git a/bundle/internal/tf/schema/resources.go b/bundle/internal/tf/schema/resources.go index 7a0c2eb8b..c2361254a 100644 --- a/bundle/internal/tf/schema/resources.go +++ b/bundle/internal/tf/schema/resources.go @@ -3,6 +3,7 @@ package schema type Resources struct { + AccessControlRuleSet map[string]*ResourceAccessControlRuleSet `json:"databricks_access_control_rule_set,omitempty"` AwsS3Mount map[string]*ResourceAwsS3Mount `json:"databricks_aws_s3_mount,omitempty"` AzureAdlsGen1Mount map[string]*ResourceAzureAdlsGen1Mount `json:"databricks_azure_adls_gen1_mount,omitempty"` AzureAdlsGen2Mount map[string]*ResourceAzureAdlsGen2Mount `json:"databricks_azure_adls_gen2_mount,omitempty"` @@ -82,6 +83,7 @@ type Resources struct { func NewResources() *Resources { return &Resources{ + AccessControlRuleSet: make(map[string]*ResourceAccessControlRuleSet), AwsS3Mount: make(map[string]*ResourceAwsS3Mount), AzureAdlsGen1Mount: make(map[string]*ResourceAzureAdlsGen1Mount), AzureAdlsGen2Mount: make(map[string]*ResourceAzureAdlsGen2Mount), From f7a76ff5d8677d5567fd61dcaa08485b2ad4fde4 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 7 Aug 2023 11:55:30 +0200 Subject: [PATCH 042/139] Fixed processing jobs libraries with remote path (#638) ## Changes Some library paths such as for Spark jobs, can reference a lib on remote path, for example DBFS. This PR fixes how CLI handles such libraries and do not report them as missing locally. 
## Tests Added unit tests + ran `databricks bundle deploy` manually --- bundle/libraries/libraries.go | 18 ++++++++++++++- bundle/tests/bundle/python_wheel/bundle.yml | 2 ++ .../bundle/python_wheel_dbfs_lib/bundle.yml | 15 +++++++++++++ .../python_wheel_no_artifact/bundle.yml | 2 ++ bundle/tests/bundle/wheel_test.go | 22 +++++++++++++++++++ 5 files changed, 58 insertions(+), 1 deletion(-) create mode 100644 bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index f7a2574ad..8ccf3fc7b 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "path/filepath" + "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" @@ -111,5 +112,20 @@ func libPath(library *compute.Library) string { } func isLocalLibrary(library *compute.Library) bool { - return libPath(library) != "" + path := libPath(library) + if path == "" { + return false + } + + return !isDbfsPath(path) && !isWorkspacePath(path) +} + +func isDbfsPath(path string) bool { + return strings.HasPrefix(path, "dbfs:/") +} + +func isWorkspacePath(path string) bool { + return strings.HasPrefix(path, "/Workspace/") || + strings.HasPrefix(path, "/Users/") || + strings.HasPrefix(path, "/Shared/") } diff --git a/bundle/tests/bundle/python_wheel/bundle.yml b/bundle/tests/bundle/python_wheel/bundle.yml index 4e272c9f5..c82ff83f7 100644 --- a/bundle/tests/bundle/python_wheel/bundle.yml +++ b/bundle/tests/bundle/python_wheel/bundle.yml @@ -17,3 +17,5 @@ resources: python_wheel_task: package_name: "my_test_code" entry_point: "run" + libraries: + - whl: ./my_test_code/dist/*.whl diff --git a/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml new file mode 100644 index 000000000..54577d658 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml @@ -0,0 +1,15 @@ +bundle: + name: python-wheel 
+ +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-132531-5opeqon1" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: dbfs://path/to/dist/mywheel.whl diff --git a/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml index 109086729..88cb47be5 100644 --- a/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml +++ b/bundle/tests/bundle/python_wheel_no_artifact/bundle.yml @@ -11,3 +11,5 @@ resources: python_wheel_task: package_name: "my_test_code" entry_point: "run" + libraries: + - whl: ./dist/*.whl diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go index 2290e47c6..bfc1fa04a 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -6,6 +6,7 @@ import ( "testing" "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/phases" "github.com/stretchr/testify/require" ) @@ -21,6 +22,10 @@ func TestBundlePythonWheelBuild(t *testing.T) { matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) + + match := libraries.MatchWithArtifacts() + err = match.Apply(context.Background(), b) + require.NoError(t, err) } func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { @@ -34,4 +39,21 @@ func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") require.NoError(t, err) require.Equal(t, 1, len(matches)) + + match := libraries.MatchWithArtifacts() + err = match.Apply(context.Background(), b) + require.NoError(t, err) +} + +func TestBundlePythonWheelWithDBFSLib(t *testing.T) { + b, err := bundle.Load("./python_wheel_dbfs_lib") + require.NoError(t, err) + + m := phases.Build() + err = 
m.Apply(context.Background(), b) + require.NoError(t, err) + + match := libraries.MatchWithArtifacts() + err = match.Apply(context.Background(), b) + require.NoError(t, err) } From 55e62366fa48aba8c28aaf0c297b47658861ff69 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 7 Aug 2023 14:44:01 +0200 Subject: [PATCH 043/139] Add unit test for file name execution during rendering (#640) ## Changes Adds a Unit test that directories and files in the file tree are executed as templates --- libs/template/renderer_test.go | 25 +++++++++++++++++++ .../{{.dir_name}}/{{.file_name}}.tmpl | 0 2 files changed, 25 insertions(+) create mode 100644 libs/template/testdata/file-tree-rendering/template/{{.dir_name}}/{{.file_name}}.tmpl diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index 8cd89ae99..37b94b1ee 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -434,3 +434,28 @@ func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) { assert.Equal(t, r.files[0].(*copyFile).srcPath, "not-a-template") assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "not-a-template")) } + +func TestRendererFileTreeRendering(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, map[string]any{ + "dir_name": "my_directory", + "file_name": "my_file", + }, "./testdata/file-tree-rendering/template", "./testdata/file-tree-rendering/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + // Assert in memory representation is created. + assert.Len(t, r.files, 1) + assert.Equal(t, r.files[0].DstPath().absPath(), filepath.Join(tmpDir, "my_directory", "my_file")) + + err = r.persistToDisk() + require.NoError(t, err) + + // Assert files and directories are correctly materialized. 
+ assert.DirExists(t, filepath.Join(tmpDir, "my_directory")) + assert.FileExists(t, filepath.Join(tmpDir, "my_directory", "my_file")) +} diff --git a/libs/template/testdata/file-tree-rendering/template/{{.dir_name}}/{{.file_name}}.tmpl b/libs/template/testdata/file-tree-rendering/template/{{.dir_name}}/{{.file_name}}.tmpl new file mode 100644 index 000000000..e69de29bb From 81ee031a0415ab36442cbabacfee5da875a7de62 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 7 Aug 2023 15:14:25 +0200 Subject: [PATCH 044/139] Add bundle init command and support for prompting user for input values (#631) ## Changes This PR adds two features: 1. The bundle init command 2. Support for prompting for input values In order to do this, this PR also introduces a new `config` struct which handles reading config files, prompting users and all validation steps before we materialize the template With this PR users can start authoring custom templates, based on go text templates, for their projects / orgs. 
## Tests Unit tests, both existing and new --- cmd/bundle/bundle.go | 1 + cmd/bundle/init.go | 79 +++++ cmd/bundle/init_test.go | 27 ++ libs/template/config.go | 198 +++++++++++++ libs/template/config_test.go | 163 +++++++++++ libs/template/materialize.go | 60 ++++ libs/template/schema.go | 121 -------- libs/template/schema_test.go | 274 ------------------ .../config.json | 6 + .../config.json | 3 + .../config-assign-from-file/config.json | 6 + libs/template/utils.go | 99 +++++++ libs/template/utils_test.go | 115 ++++++++ libs/template/validators.go | 4 +- libs/template/validators_test.go | 61 +++- 15 files changed, 815 insertions(+), 402 deletions(-) create mode 100644 cmd/bundle/init.go create mode 100644 cmd/bundle/init_test.go create mode 100644 libs/template/config.go create mode 100644 libs/template/config_test.go create mode 100644 libs/template/materialize.go delete mode 100644 libs/template/schema.go delete mode 100644 libs/template/schema_test.go create mode 100644 libs/template/testdata/config-assign-from-file-invalid-int/config.json create mode 100644 libs/template/testdata/config-assign-from-file-unknown-property/config.json create mode 100644 libs/template/testdata/config-assign-from-file/config.json create mode 100644 libs/template/utils.go create mode 100644 libs/template/utils_test.go diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go index 8d1216f85..c933ec9c3 100644 --- a/cmd/bundle/bundle.go +++ b/cmd/bundle/bundle.go @@ -19,5 +19,6 @@ func New() *cobra.Command { cmd.AddCommand(newSyncCommand()) cmd.AddCommand(newTestCommand()) cmd.AddCommand(newValidateCommand()) + cmd.AddCommand(newInitCommand()) return cmd } diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go new file mode 100644 index 000000000..e3d76ecf2 --- /dev/null +++ b/cmd/bundle/init.go @@ -0,0 +1,79 @@ +package bundle + +import ( + "os" + "path/filepath" + "strings" + + "github.com/databricks/cli/libs/git" + "github.com/databricks/cli/libs/template" + "github.com/spf13/cobra" 
+) + +var gitUrlPrefixes = []string{ + "https://", + "git@", +} + +func isRepoUrl(url string) bool { + result := false + for _, prefix := range gitUrlPrefixes { + if strings.HasPrefix(url, prefix) { + result = true + break + } + } + return result +} + +// Computes the repo name from the repo URL. Treats the last non empty word +// when splitting at '/' as the repo name. For example: for url git@github.com:databricks/cli.git +// the name would be "cli.git" +func repoName(url string) string { + parts := strings.Split(strings.TrimRight(url, "/"), "/") + return parts[len(parts)-1] +} + +func newInitCommand() *cobra.Command { + cmd := &cobra.Command{ + Use: "init TEMPLATE_PATH", + Short: "Initialize Template", + Args: cobra.ExactArgs(1), + } + + var configFile string + var projectDir string + cmd.Flags().StringVar(&configFile, "config-file", "", "File containing input parameters for template initialization.") + cmd.Flags().StringVar(&projectDir, "project-dir", "", "The project will be initialized in this directory.") + cmd.MarkFlagRequired("project-dir") + + cmd.RunE = func(cmd *cobra.Command, args []string) error { + templatePath := args[0] + ctx := cmd.Context() + + if !isRepoUrl(templatePath) { + // skip downloading the repo because input arg is not a URL. We assume + // it's a path on the local file system in that case + return template.Materialize(ctx, configFile, templatePath, projectDir) + } + + // Download the template in a temporary directory + tmpDir := os.TempDir() + templateURL := templatePath + templateDir := filepath.Join(tmpDir, repoName(templateURL)) + err := os.MkdirAll(templateDir, 0755) + if err != nil { + return err + } + // TODO: Add automated test that the downloaded git repo is cleaned up. 
+ err = git.Clone(ctx, templateURL, "", templateDir) + if err != nil { + return err + } + defer os.RemoveAll(templateDir) + + return template.Materialize(ctx, configFile, templateDir, projectDir) + } + + return cmd +} diff --git a/cmd/bundle/init_test.go b/cmd/bundle/init_test.go new file mode 100644 index 000000000..4a795160e --- /dev/null +++ b/cmd/bundle/init_test.go @@ -0,0 +1,27 @@ +package bundle + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestBundleInitIsRepoUrl(t *testing.T) { + assert.True(t, isRepoUrl("git@github.com:databricks/cli.git")) + assert.True(t, isRepoUrl("https://github.com/databricks/cli.git")) + + assert.False(t, isRepoUrl("./local")) + assert.False(t, isRepoUrl("foo")) +} + +func TestBundleInitRepoName(t *testing.T) { + // Test valid URLs + assert.Equal(t, "cli.git", repoName("git@github.com:databricks/cli.git")) + assert.Equal(t, "cli", repoName("https://github.com/databricks/cli/")) + + // test invalid URLs. In these cases the error would be floated when the + // git clone operation fails. 
+ assert.Equal(t, "git@github.com:databricks", repoName("git@github.com:databricks")) + assert.Equal(t, "invalid-url", repoName("invalid-url")) + assert.Equal(t, "www.github.com", repoName("https://www.github.com")) +} diff --git a/libs/template/config.go b/libs/template/config.go new file mode 100644 index 000000000..ee5fcbef8 --- /dev/null +++ b/libs/template/config.go @@ -0,0 +1,198 @@ +package template + +import ( + "context" + "encoding/json" + "fmt" + "os" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/jsonschema" +) + +type config struct { + ctx context.Context + values map[string]any + schema *jsonschema.Schema +} + +func newConfig(ctx context.Context, schemaPath string) (*config, error) { + // Read config schema + schemaBytes, err := os.ReadFile(schemaPath) + if err != nil { + return nil, err + } + schema := &jsonschema.Schema{} + err = json.Unmarshal(schemaBytes, schema) + if err != nil { + return nil, err + } + + // Return config + return &config{ + ctx: ctx, + schema: schema, + values: make(map[string]any, 0), + }, nil +} + +// Reads json file at path and assigns values from the file +func (c *config) assignValuesFromFile(path string) error { + // Read the config file + configFromFile := make(map[string]any, 0) + b, err := os.ReadFile(path) + if err != nil { + return err + } + err = json.Unmarshal(b, &configFromFile) + if err != nil { + return err + } + + // Cast any integer properties, from float to integer. 
Required because + // the json unmarshaller treats all json numbers as floating point + for name, floatVal := range configFromFile { + property, ok := c.schema.Properties[name] + if !ok { + return fmt.Errorf("%s is not defined as an input parameter for the template", name) + } + if property.Type != jsonschema.IntegerType { + continue + } + v, err := toInteger(floatVal) + if err != nil { + return fmt.Errorf("failed to cast value %v of property %s from file %s to an integer: %w", floatVal, name, path, err) + } + configFromFile[name] = v + } + + // Write configs from the file to the input map, not overwriting any existing + // configurations. + for name, val := range configFromFile { + if _, ok := c.values[name]; ok { + continue + } + c.values[name] = val + } + return nil +} + +// Assigns default values from schema to input config map +func (c *config) assignDefaultValues() error { + for name, property := range c.schema.Properties { + // Config already has a value assigned + if _, ok := c.values[name]; ok { + continue + } + + // No default value defined for the property + if property.Default == nil { + continue + } + + // Assign default value if property is not an integer + if property.Type != jsonschema.IntegerType { + c.values[name] = property.Default + continue + } + + // Cast default value to int before assigning to an integer configuration. 
+ // Required because untyped field Default will read all numbers as floats + // during unmarshalling + v, err := toInteger(property.Default) + if err != nil { + return fmt.Errorf("failed to cast default value %v of property %s to an integer: %w", property.Default, name, err) + } + c.values[name] = v + } + return nil +} + +// Prompts user for values for properties that do not have a value set yet +func (c *config) promptForValues() error { + for name, property := range c.schema.Properties { + // Config already has a value assigned + if _, ok := c.values[name]; ok { + continue + } + + // Initialize Prompt dialog + var err error + prompt := cmdio.Prompt(c.ctx) + prompt.Label = property.Description + prompt.AllowEdit = true + + // Compute default value to display by converting it to a string + if property.Default != nil { + prompt.Default, err = toString(property.Default, property.Type) + if err != nil { + return err + } + } + + // Get user input by running the prompt + userInput, err := prompt.Run() + if err != nil { + return err + } + + // Convert user input string back to a value + c.values[name], err = fromString(userInput, property.Type) + if err != nil { + return err + } + } + return nil +} + +// Prompt user for any missing config values. Assign default values if +// terminal is not TTY +func (c *config) promptOrAssignDefaultValues() error { + if cmdio.IsOutTTY(c.ctx) && cmdio.IsInTTY(c.ctx) { + return c.promptForValues() + } + return c.assignDefaultValues() +} + +// Validates the configuration. If passes, the configuration is ready to be used +// to initialize the template. 
+func (c *config) validate() error { + validateFns := []func() error{ + c.validateValuesDefined, + c.validateValuesType, + } + + for _, fn := range validateFns { + err := fn() + if err != nil { + return err + } + } + return nil +} + +// Validates all input properties have a user defined value assigned to them +func (c *config) validateValuesDefined() error { + for k := range c.schema.Properties { + if _, ok := c.values[k]; ok { + continue + } + return fmt.Errorf("no value has been assigned to input parameter %s", k) + } + return nil +} + +// Validates the types of all input properties values match their types defined in the schema +func (c *config) validateValuesType() error { + for k, v := range c.values { + fieldInfo, ok := c.schema.Properties[k] + if !ok { + return fmt.Errorf("%s is not defined as an input parameter for the template", k) + } + err := validateType(v, fieldInfo.Type) + if err != nil { + return fmt.Errorf("incorrect type for %s. %w", k, err) + } + } + return nil +} diff --git a/libs/template/config_test.go b/libs/template/config_test.go new file mode 100644 index 000000000..7b8341ec4 --- /dev/null +++ b/libs/template/config_test.go @@ -0,0 +1,163 @@ +package template + +import ( + "encoding/json" + "testing" + + "github.com/databricks/cli/libs/jsonschema" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func testSchema(t *testing.T) *jsonschema.Schema { + schemaJson := `{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + } + }` + var jsonSchema jsonschema.Schema + err := json.Unmarshal([]byte(schemaJson), &jsonSchema) + require.NoError(t, err) + return &jsonSchema +} + +func TestTemplateConfigAssignValuesFromFile(t *testing.T) { + c := config{ + schema: testSchema(t), + values: make(map[string]any), + } + + err := 
c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") + assert.NoError(t, err) + + assert.Equal(t, int64(1), c.values["int_val"]) + assert.Equal(t, float64(2), c.values["float_val"]) + assert.Equal(t, true, c.values["bool_val"]) + assert.Equal(t, "hello", c.values["string_val"]) +} + +func TestTemplateConfigAssignValuesFromFileForUnknownField(t *testing.T) { + c := config{ + schema: testSchema(t), + values: make(map[string]any), + } + + err := c.assignValuesFromFile("./testdata/config-assign-from-file-unknown-property/config.json") + assert.EqualError(t, err, "unknown_prop is not defined as an input parameter for the template") +} + +func TestTemplateConfigAssignValuesFromFileForInvalidIntegerValue(t *testing.T) { + c := config{ + schema: testSchema(t), + values: make(map[string]any), + } + + err := c.assignValuesFromFile("./testdata/config-assign-from-file-invalid-int/config.json") + assert.EqualError(t, err, "failed to cast value abc of property int_val from file ./testdata/config-assign-from-file-invalid-int/config.json to an integer: cannot convert \"abc\" to an integer") +} + +func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *testing.T) { + c := config{ + schema: testSchema(t), + values: map[string]any{ + "string_val": "this-is-not-overwritten", + }, + } + + err := c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") + assert.NoError(t, err) + + assert.Equal(t, int64(1), c.values["int_val"]) + assert.Equal(t, float64(2), c.values["float_val"]) + assert.Equal(t, true, c.values["bool_val"]) + assert.Equal(t, "this-is-not-overwritten", c.values["string_val"]) +} + +func TestTemplateConfigAssignDefaultValues(t *testing.T) { + c := config{ + schema: testSchema(t), + values: make(map[string]any), + } + + err := c.assignDefaultValues() + assert.NoError(t, err) + + assert.Len(t, c.values, 2) + assert.Equal(t, "abc", c.values["string_val"]) + assert.Equal(t, int64(123), c.values["int_val"]) +} + +func 
TestTemplateConfigValidateValuesDefined(t *testing.T) { + c := config{ + schema: testSchema(t), + values: map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + }, + } + + err := c.validateValuesDefined() + assert.EqualError(t, err, "no value has been assigned to input parameter string_val") +} + +func TestTemplateConfigValidateTypeForValidConfig(t *testing.T) { + c := &config{ + schema: testSchema(t), + values: map[string]any{ + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", + }, + } + + err := c.validateValuesType() + assert.NoError(t, err) + + err = c.validate() + assert.NoError(t, err) +} + +func TestTemplateConfigValidateTypeForUnknownField(t *testing.T) { + c := &config{ + schema: testSchema(t), + values: map[string]any{ + "unknown_prop": 1, + }, + } + + err := c.validateValuesType() + assert.EqualError(t, err, "unknown_prop is not defined as an input parameter for the template") +} + +func TestTemplateConfigValidateTypeForInvalidType(t *testing.T) { + c := &config{ + schema: testSchema(t), + values: map[string]any{ + "int_val": "this-should-be-an-int", + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", + }, + } + + err := c.validateValuesType() + assert.EqualError(t, err, `incorrect type for int_val. expected type integer, but value is "this-should-be-an-int"`) + + err = c.validate() + assert.EqualError(t, err, `incorrect type for int_val. expected type integer, but value is "this-should-be-an-int"`) +} diff --git a/libs/template/materialize.go b/libs/template/materialize.go new file mode 100644 index 000000000..bbc9e8da3 --- /dev/null +++ b/libs/template/materialize.go @@ -0,0 +1,60 @@ +package template + +import ( + "context" + "path/filepath" +) + +const libraryDirName = "library" +const templateDirName = "template" +const schemaFileName = "databricks_template_schema.json" + +// This function materializes the input templates as a project, using user defined +// configurations. 
+// Parameters: +// +// ctx: context containing a cmdio object. This is used to prompt the user +// configFilePath: file path containing user defined config values +// templateRoot: root of the template definition +// projectDir: root of directory where to initialize the project +func Materialize(ctx context.Context, configFilePath, templateRoot, projectDir string) error { + templatePath := filepath.Join(templateRoot, templateDirName) + libraryPath := filepath.Join(templateRoot, libraryDirName) + schemaPath := filepath.Join(templateRoot, schemaFileName) + + config, err := newConfig(ctx, schemaPath) + if err != nil { + return err + } + + // Read and assign config values from file + if configFilePath != "" { + err = config.assignValuesFromFile(configFilePath) + if err != nil { + return err + } + } + + // Prompt user for any missing config values. Assign default values if + // terminal is not TTY + err = config.promptOrAssignDefaultValues() + if err != nil { + return err + } + + err = config.validate() + if err != nil { + return err + } + + // Walk and render the template, since input configuration is complete + r, err := newRenderer(ctx, config.values, templatePath, libraryPath, projectDir) + if err != nil { + return err + } + err = r.walk() + if err != nil { + return err + } + return r.persistToDisk() +} diff --git a/libs/template/schema.go b/libs/template/schema.go deleted file mode 100644 index 957cd66c7..000000000 --- a/libs/template/schema.go +++ /dev/null @@ -1,121 +0,0 @@ -package template - -import ( - "encoding/json" - "fmt" - "os" - - "github.com/databricks/cli/libs/jsonschema" -) - -// function to check whether a float value represents an integer -func isIntegerValue(v float64) bool { - return v == float64(int(v)) -} - -// cast value to integer for config values that are floats but are supposed to be -// integers according to the schema -// -// Needed because the default json unmarshaler for maps converts all numbers to floats -func 
castFloatConfigValuesToInt(config map[string]any, jsonSchema *jsonschema.Schema) error { - for k, v := range config { - // error because all config keys should be defined in schema too - fieldInfo, ok := jsonSchema.Properties[k] - if !ok { - return fmt.Errorf("%s is not defined as an input parameter for the template", k) - } - // skip non integer fields - if fieldInfo.Type != jsonschema.IntegerType { - continue - } - - // convert floating point type values to integer - switch floatVal := v.(type) { - case float32: - if !isIntegerValue(float64(floatVal)) { - return fmt.Errorf("expected %s to have integer value but it is %v", k, v) - } - config[k] = int(floatVal) - case float64: - if !isIntegerValue(floatVal) { - return fmt.Errorf("expected %s to have integer value but it is %v", k, v) - } - config[k] = int(floatVal) - } - } - return nil -} - -func assignDefaultConfigValues(config map[string]any, schema *jsonschema.Schema) error { - for k, v := range schema.Properties { - if _, ok := config[k]; ok { - continue - } - if v.Default == nil { - return fmt.Errorf("input parameter %s is not defined in config", k) - } - config[k] = v.Default - } - return nil -} - -func validateConfigValueTypes(config map[string]any, schema *jsonschema.Schema) error { - // validate types defined in config - for k, v := range config { - fieldInfo, ok := schema.Properties[k] - if !ok { - return fmt.Errorf("%s is not defined as an input parameter for the template", k) - } - err := validateType(v, fieldInfo.Type) - if err != nil { - return fmt.Errorf("incorrect type for %s. 
%w", k, err) - } - } - return nil -} - -func ReadSchema(path string) (*jsonschema.Schema, error) { - schemaBytes, err := os.ReadFile(path) - if err != nil { - return nil, err - } - schema := &jsonschema.Schema{} - err = json.Unmarshal(schemaBytes, schema) - if err != nil { - return nil, err - } - return schema, nil -} - -func ReadConfig(path string, jsonSchema *jsonschema.Schema) (map[string]any, error) { - // Read config file - var config map[string]any - b, err := os.ReadFile(path) - if err != nil { - return nil, err - } - err = json.Unmarshal(b, &config) - if err != nil { - return nil, err - } - - // Assign default value to any fields that do not have a value yet - err = assignDefaultConfigValues(config, jsonSchema) - if err != nil { - return nil, err - } - - // cast any fields that are supposed to be integers. The json unmarshalling - // for a generic map converts all numbers to floating point - err = castFloatConfigValuesToInt(config, jsonSchema) - if err != nil { - return nil, err - } - - // validate config according to schema - err = validateConfigValueTypes(config, jsonSchema) - if err != nil { - return nil, err - } - return config, nil -} diff --git a/libs/template/schema_test.go b/libs/template/schema_test.go deleted file mode 100644 index ba30f81a9..000000000 --- a/libs/template/schema_test.go +++ /dev/null @@ -1,274 +0,0 @@ -package template - -import ( - "encoding/json" - "testing" - - "github.com/databricks/cli/libs/jsonschema" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func testSchema(t *testing.T) *jsonschema.Schema { - schemaJson := `{ - "properties": { - "int_val": { - "type": "integer" - }, - "float_val": { - "type": "number" - }, - "bool_val": { - "type": "boolean" - }, - "string_val": { - "type": "string" - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) - require.NoError(t, err) - return &jsonSchema -} - -func TestTemplateSchemaIsInteger(t 
*testing.T) { - assert.False(t, isIntegerValue(1.1)) - assert.False(t, isIntegerValue(0.1)) - assert.False(t, isIntegerValue(-0.1)) - - assert.True(t, isIntegerValue(-1.0)) - assert.True(t, isIntegerValue(0.0)) - assert.True(t, isIntegerValue(2.0)) -} - -func TestTemplateSchemaCastFloatToInt(t *testing.T) { - // define schema for config - jsonSchema := testSchema(t) - - // define the config - configJson := `{ - "int_val": 1, - "float_val": 2, - "bool_val": true, - "string_val": "main hoon na" - }` - var config map[string]any - err := json.Unmarshal([]byte(configJson), &config) - require.NoError(t, err) - - // assert types before casting, checking that the integer was indeed loaded - // as a floating point - assert.IsType(t, float64(0), config["int_val"]) - assert.IsType(t, float64(0), config["float_val"]) - assert.IsType(t, true, config["bool_val"]) - assert.IsType(t, "abc", config["string_val"]) - - err = castFloatConfigValuesToInt(config, jsonSchema) - require.NoError(t, err) - - // assert type after casting, that the float value was converted to an integer - // for int_val. 
- assert.IsType(t, int(0), config["int_val"]) - assert.IsType(t, float64(0), config["float_val"]) - assert.IsType(t, true, config["bool_val"]) - assert.IsType(t, "abc", config["string_val"]) -} - -func TestTemplateSchemaCastFloatToIntFailsForUnknownTypes(t *testing.T) { - // define schema for config - schemaJson := `{ - "properties": { - "foo": { - "type": "integer" - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) - require.NoError(t, err) - - // define the config - configJson := `{ - "bar": true - }` - var config map[string]any - err = json.Unmarshal([]byte(configJson), &config) - require.NoError(t, err) - - err = castFloatConfigValuesToInt(config, &jsonSchema) - assert.ErrorContains(t, err, "bar is not defined as an input parameter for the template") -} - -func TestTemplateSchemaCastFloatToIntFailsWhenWithNonIntValues(t *testing.T) { - // define schema for config - schemaJson := `{ - "properties": { - "foo": { - "type": "integer" - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) - require.NoError(t, err) - - // define the config - configJson := `{ - "foo": 1.1 - }` - var config map[string]any - err = json.Unmarshal([]byte(configJson), &config) - require.NoError(t, err) - - err = castFloatConfigValuesToInt(config, &jsonSchema) - assert.ErrorContains(t, err, "expected foo to have integer value but it is 1.1") -} - -func TestTemplateSchemaValidateType(t *testing.T) { - // assert validation passing - err := validateType(int(0), jsonschema.IntegerType) - assert.NoError(t, err) - err = validateType(int32(1), jsonschema.IntegerType) - assert.NoError(t, err) - err = validateType(int64(1), jsonschema.IntegerType) - assert.NoError(t, err) - - err = validateType(float32(1.1), jsonschema.NumberType) - assert.NoError(t, err) - err = validateType(float64(1.2), jsonschema.NumberType) - assert.NoError(t, err) - err = validateType(int(1), jsonschema.NumberType) - 
assert.NoError(t, err) - - err = validateType(false, jsonschema.BooleanType) - assert.NoError(t, err) - - err = validateType("abc", jsonschema.StringType) - assert.NoError(t, err) - - // assert validation failing for integers - err = validateType(float64(1.2), jsonschema.IntegerType) - assert.ErrorContains(t, err, "expected type integer, but value is 1.2") - err = validateType(true, jsonschema.IntegerType) - assert.ErrorContains(t, err, "expected type integer, but value is true") - err = validateType("abc", jsonschema.IntegerType) - assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") - - // assert validation failing for floats - err = validateType(true, jsonschema.NumberType) - assert.ErrorContains(t, err, "expected type float, but value is true") - err = validateType("abc", jsonschema.NumberType) - assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") - - // assert validation failing for boolean - err = validateType(int(1), jsonschema.BooleanType) - assert.ErrorContains(t, err, "expected type boolean, but value is 1") - err = validateType(float64(1), jsonschema.BooleanType) - assert.ErrorContains(t, err, "expected type boolean, but value is 1") - err = validateType("abc", jsonschema.BooleanType) - assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") - - // assert validation failing for string - err = validateType(int(1), jsonschema.StringType) - assert.ErrorContains(t, err, "expected type string, but value is 1") - err = validateType(float64(1), jsonschema.StringType) - assert.ErrorContains(t, err, "expected type string, but value is 1") - err = validateType(false, jsonschema.StringType) - assert.ErrorContains(t, err, "expected type string, but value is false") -} - -func TestTemplateSchemaValidateConfig(t *testing.T) { - // define schema for config - jsonSchema := testSchema(t) - - // define the config - config := map[string]any{ - "int_val": 1, - "float_val": 1.1, - "bool_val": true, - 
"string_val": "abc", - } - - err := validateConfigValueTypes(config, jsonSchema) - assert.NoError(t, err) -} - -func TestTemplateSchemaValidateConfigFailsForUnknownField(t *testing.T) { - // define schema for config - jsonSchema := testSchema(t) - - // define the config - config := map[string]any{ - "foo": 1, - "float_val": 1.1, - "bool_val": true, - "string_val": "abc", - } - - err := validateConfigValueTypes(config, jsonSchema) - assert.ErrorContains(t, err, "foo is not defined as an input parameter for the template") -} - -func TestTemplateSchemaValidateConfigFailsForWhenIncorrectTypes(t *testing.T) { - // define schema for config - jsonSchema := testSchema(t) - - // define the config - config := map[string]any{ - "int_val": 1, - "float_val": 1.1, - "bool_val": "true", - "string_val": "abc", - } - - err := validateConfigValueTypes(config, jsonSchema) - assert.ErrorContains(t, err, "incorrect type for bool_val. expected type boolean, but value is \"true\"") -} - -func TestTemplateSchemaValidateConfigFailsForWhenMissingInputParams(t *testing.T) { - // define schema for config - schemaJson := `{ - "properties": { - "int_val": { - "type": "integer" - }, - "string_val": { - "type": "string" - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) - require.NoError(t, err) - - // define the config - config := map[string]any{ - "int_val": 1, - } - - err = assignDefaultConfigValues(config, &jsonSchema) - assert.ErrorContains(t, err, "input parameter string_val is not defined in config") -} - -func TestTemplateDefaultAssignment(t *testing.T) { - // define schema for config - schemaJson := `{ - "properties": { - "foo": { - "type": "integer", - "default": 1 - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) - require.NoError(t, err) - - // define the config - config := map[string]any{} - - err = assignDefaultConfigValues(config, &jsonSchema) - assert.NoError(t, err) - 
assert.Equal(t, 1.0, config["foo"]) -} diff --git a/libs/template/testdata/config-assign-from-file-invalid-int/config.json b/libs/template/testdata/config-assign-from-file-invalid-int/config.json new file mode 100644 index 000000000..a97bf0c2e --- /dev/null +++ b/libs/template/testdata/config-assign-from-file-invalid-int/config.json @@ -0,0 +1,6 @@ +{ + "int_val": "abc", + "float_val": 2, + "bool_val": true, + "string_val": "hello" +} diff --git a/libs/template/testdata/config-assign-from-file-unknown-property/config.json b/libs/template/testdata/config-assign-from-file-unknown-property/config.json new file mode 100644 index 000000000..518eaa6a2 --- /dev/null +++ b/libs/template/testdata/config-assign-from-file-unknown-property/config.json @@ -0,0 +1,3 @@ +{ + "unknown_prop": 123 +} diff --git a/libs/template/testdata/config-assign-from-file/config.json b/libs/template/testdata/config-assign-from-file/config.json new file mode 100644 index 000000000..564001e57 --- /dev/null +++ b/libs/template/testdata/config-assign-from-file/config.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "float_val": 2, + "bool_val": true, + "string_val": "hello" +} diff --git a/libs/template/utils.go b/libs/template/utils.go new file mode 100644 index 000000000..bf11ed86f --- /dev/null +++ b/libs/template/utils.go @@ -0,0 +1,99 @@ +package template + +import ( + "errors" + "fmt" + "strconv" + + "github.com/databricks/cli/libs/jsonschema" +) + +// function to check whether a float value represents an integer +func isIntegerValue(v float64) bool { + return v == float64(int64(v)) +} + +func toInteger(v any) (int64, error) { + switch typedVal := v.(type) { + // cast float to int + case float32: + if !isIntegerValue(float64(typedVal)) { + return 0, fmt.Errorf("expected integer value, got: %v", v) + } + return int64(typedVal), nil + case float64: + if !isIntegerValue(typedVal) { + return 0, fmt.Errorf("expected integer value, got: %v", v) + } + return int64(typedVal), nil + + // pass through common 
integer cases + case int: + return int64(typedVal), nil + case int32: + return int64(typedVal), nil + case int64: + return typedVal, nil + + default: + return 0, fmt.Errorf("cannot convert %#v to an integer", v) + } +} + +func toString(v any, T jsonschema.Type) (string, error) { + switch T { + case jsonschema.BooleanType: + boolVal, ok := v.(bool) + if !ok { + return "", fmt.Errorf("expected bool, got: %#v", v) + } + return strconv.FormatBool(boolVal), nil + case jsonschema.StringType: + strVal, ok := v.(string) + if !ok { + return "", fmt.Errorf("expected string, got: %#v", v) + } + return strVal, nil + case jsonschema.NumberType: + floatVal, ok := v.(float64) + if !ok { + return "", fmt.Errorf("expected float, got: %#v", v) + } + return strconv.FormatFloat(floatVal, 'f', -1, 64), nil + case jsonschema.IntegerType: + intVal, err := toInteger(v) + if err != nil { + return "", err + } + return strconv.FormatInt(intVal, 10), nil + default: + return "", fmt.Errorf("cannot format object of type %s as a string. Value of object: %#v", T, v) + } +} + +func fromString(s string, T jsonschema.Type) (any, error) { + if T == jsonschema.StringType { + return s, nil + } + + // Variables to store value and error from parsing + var v any + var err error + + switch T { + case jsonschema.BooleanType: + v, err = strconv.ParseBool(s) + case jsonschema.NumberType: + v, err = strconv.ParseFloat(s, 32) + case jsonschema.IntegerType: + v, err = strconv.ParseInt(s, 10, 64) + default: + return "", fmt.Errorf("cannot parse string as object of type %s. 
Value of string: %q", T, s) + } + + // Return more readable error incase of a syntax error + if errors.Is(err, strconv.ErrSyntax) { + return nil, fmt.Errorf("could not parse %q as a %s: %w", s, T, err) + } + return v, err +} diff --git a/libs/template/utils_test.go b/libs/template/utils_test.go new file mode 100644 index 000000000..5fe702439 --- /dev/null +++ b/libs/template/utils_test.go @@ -0,0 +1,115 @@ +package template + +import ( + "math" + "testing" + + "github.com/databricks/cli/libs/jsonschema" + "github.com/stretchr/testify/assert" +) + +func TestTemplateIsInteger(t *testing.T) { + assert.False(t, isIntegerValue(1.1)) + assert.False(t, isIntegerValue(0.1)) + assert.False(t, isIntegerValue(-0.1)) + + assert.True(t, isIntegerValue(-1.0)) + assert.True(t, isIntegerValue(0.0)) + assert.True(t, isIntegerValue(2.0)) +} + +func TestTemplateToInteger(t *testing.T) { + v, err := toInteger(float32(2)) + assert.NoError(t, err) + assert.Equal(t, int64(2), v) + + v, err = toInteger(float64(4)) + assert.NoError(t, err) + assert.Equal(t, int64(4), v) + + v, err = toInteger(float64(4)) + assert.NoError(t, err) + assert.Equal(t, int64(4), v) + + v, err = toInteger(float64(math.MaxInt32 + 10)) + assert.NoError(t, err) + assert.Equal(t, int64(2147483657), v) + + v, err = toInteger(2) + assert.NoError(t, err) + assert.Equal(t, int64(2), v) + + _, err = toInteger(float32(2.2)) + assert.EqualError(t, err, "expected integer value, got: 2.2") + + _, err = toInteger(float64(math.MaxInt32 + 100.1)) + assert.ErrorContains(t, err, "expected integer value, got: 2.1474837471e+09") + + _, err = toInteger("abcd") + assert.EqualError(t, err, "cannot convert \"abcd\" to an integer") +} + +func TestTemplateToString(t *testing.T) { + s, err := toString(true, jsonschema.BooleanType) + assert.NoError(t, err) + assert.Equal(t, "true", s) + + s, err = toString("abc", jsonschema.StringType) + assert.NoError(t, err) + assert.Equal(t, "abc", s) + + s, err = toString(1.1, jsonschema.NumberType) + 
assert.NoError(t, err) + assert.Equal(t, "1.1", s) + + s, err = toString(2, jsonschema.IntegerType) + assert.NoError(t, err) + assert.Equal(t, "2", s) + + _, err = toString([]string{}, jsonschema.ArrayType) + assert.EqualError(t, err, "cannot format object of type array as a string. Value of object: []string{}") + + _, err = toString("true", jsonschema.BooleanType) + assert.EqualError(t, err, "expected bool, got: \"true\"") + + _, err = toString(123, jsonschema.StringType) + assert.EqualError(t, err, "expected string, got: 123") + + _, err = toString(false, jsonschema.NumberType) + assert.EqualError(t, err, "expected float, got: false") + + _, err = toString("abc", jsonschema.IntegerType) + assert.EqualError(t, err, "cannot convert \"abc\" to an integer") +} + +func TestTemplateFromString(t *testing.T) { + v, err := fromString("true", jsonschema.BooleanType) + assert.NoError(t, err) + assert.Equal(t, true, v) + + v, err = fromString("abc", jsonschema.StringType) + assert.NoError(t, err) + assert.Equal(t, "abc", v) + + v, err = fromString("1.1", jsonschema.NumberType) + assert.NoError(t, err) + // Floating point conversions are not perfect + assert.True(t, (v.(float64)-1.1) < 0.000001) + + v, err = fromString("12345", jsonschema.IntegerType) + assert.NoError(t, err) + assert.Equal(t, int64(12345), v) + + v, err = fromString("123", jsonschema.NumberType) + assert.NoError(t, err) + assert.Equal(t, float64(123), v) + + _, err = fromString("qrt", jsonschema.ArrayType) + assert.EqualError(t, err, "cannot parse string as object of type array. 
Value of string: \"qrt\"") + + _, err = fromString("abc", jsonschema.IntegerType) + assert.EqualError(t, err, "could not parse \"abc\" as a integer: strconv.ParseInt: parsing \"abc\": invalid syntax") + + _, err = fromString("1.0", jsonschema.IntegerType) + assert.EqualError(t, err, "could not parse \"1.0\" as a integer: strconv.ParseInt: parsing \"1.0\": invalid syntax") +} diff --git a/libs/template/validators.go b/libs/template/validators.go index 0ae41e461..57eda0935 100644 --- a/libs/template/validators.go +++ b/libs/template/validators.go @@ -33,9 +33,7 @@ func validateBoolean(v any) error { } func validateNumber(v any) error { - if !slices.Contains([]reflect.Kind{reflect.Float32, reflect.Float64, reflect.Int, - reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, - reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64}, + if !slices.Contains([]reflect.Kind{reflect.Float32, reflect.Float64}, reflect.TypeOf(v).Kind()) { return fmt.Errorf("expected type float, but value is %#v", v) } diff --git a/libs/template/validators_test.go b/libs/template/validators_test.go index f0cbf8a14..f34f037a1 100644 --- a/libs/template/validators_test.go +++ b/libs/template/validators_test.go @@ -3,8 +3,8 @@ package template import ( "testing" + "github.com/databricks/cli/libs/jsonschema" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func TestValidatorString(t *testing.T) { @@ -40,10 +40,10 @@ func TestValidatorNumber(t *testing.T) { assert.ErrorContains(t, err, "expected type float, but value is true") err = validateNumber(int32(1)) - require.NoError(t, err) + assert.ErrorContains(t, err, "expected type float, but value is 1") - err = validateNumber(int64(1)) - require.NoError(t, err) + err = validateNumber(int64(2)) + assert.ErrorContains(t, err, "expected type float, but value is 2") err = validateNumber(float32(1)) assert.NoError(t, err) @@ -74,3 +74,56 @@ func TestValidatorInt(t *testing.T) { err = 
validateInteger("abc") assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") } + +func TestTemplateValidateType(t *testing.T) { + // assert validation passing + err := validateType(int(0), jsonschema.IntegerType) + assert.NoError(t, err) + err = validateType(int32(1), jsonschema.IntegerType) + assert.NoError(t, err) + err = validateType(int64(1), jsonschema.IntegerType) + assert.NoError(t, err) + + err = validateType(float32(1.1), jsonschema.NumberType) + assert.NoError(t, err) + err = validateType(float64(1.2), jsonschema.NumberType) + assert.NoError(t, err) + + err = validateType(false, jsonschema.BooleanType) + assert.NoError(t, err) + + err = validateType("abc", jsonschema.StringType) + assert.NoError(t, err) + + // assert validation failing for integers + err = validateType(float64(1.2), jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is 1.2") + err = validateType(true, jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is true") + err = validateType("abc", jsonschema.IntegerType) + assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") + + // assert validation failing for floats + err = validateType(true, jsonschema.NumberType) + assert.ErrorContains(t, err, "expected type float, but value is true") + err = validateType("abc", jsonschema.NumberType) + assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") + err = validateType(int(1), jsonschema.NumberType) + assert.ErrorContains(t, err, "expected type float, but value is 1") + + // assert validation failing for boolean + err = validateType(int(1), jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType(float64(1), jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type boolean, but value is 1") + err = validateType("abc", jsonschema.BooleanType) + assert.ErrorContains(t, err, "expected type 
boolean, but value is \"abc\"") + + // assert validation failing for string + err = validateType(int(1), jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(float64(1), jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is 1") + err = validateType(false, jsonschema.StringType) + assert.ErrorContains(t, err, "expected type string, but value is false") +} From d6f626912f056a0a01d68312b59075ee70adebe5 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 7 Aug 2023 19:29:02 +0200 Subject: [PATCH 045/139] Fix bundle git branch validation (#645) ## Changes This PR: 1. Fixes the computation logic for `ActualBranch`. An error in the earlier logic caused the validation mutator to be a no-op. 2. Makes the `.git` string a global var. This is useful to configure in tests. 3. Adds e2e test for the validation mutator. ## Tests Unit test --- bundle/config/mutator/load_git_details.go | 19 +++++---- bundle/tests/autoload_git_test.go | 20 ---------- .../git_branch_validation/.mock-git/HEAD | 1 + .../git_branch_validation/databricks.yml | 4 ++ bundle/tests/git_test.go | 39 +++++++++++++++++++ libs/git/repository.go | 10 +++-- 6 files changed, 61 insertions(+), 32 deletions(-) delete mode 100644 bundle/tests/autoload_git_test.go create mode 100644 bundle/tests/git_branch_validation/.mock-git/HEAD create mode 100644 bundle/tests/git_branch_validation/databricks.yml create mode 100644 bundle/tests/git_test.go diff --git a/bundle/config/mutator/load_git_details.go b/bundle/config/mutator/load_git_details.go index f22aafe01..ab47677dd 100644 --- a/bundle/config/mutator/load_git_details.go +++ b/bundle/config/mutator/load_git_details.go @@ -24,17 +24,20 @@ func (m *loadGitDetails) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return err } - // load branch name if undefined - if b.Config.Bundle.Git.Branch == "" { - 
branch, err := repo.CurrentBranch() - if err != nil { - log.Warnf(ctx, "failed to load current branch: %s", err) - } else { - b.Config.Bundle.Git.Branch = branch - b.Config.Bundle.Git.ActualBranch = branch + + // Read branch name of current checkout + branch, err := repo.CurrentBranch() + if err == nil { + b.Config.Bundle.Git.ActualBranch = branch + if b.Config.Bundle.Git.Branch == "" { + // Only load branch if there's no user defined value b.Config.Bundle.Git.Inferred = true + b.Config.Bundle.Git.Branch = branch } + } else { + log.Warnf(ctx, "failed to load current branch: %s", err) } + // load commit hash if undefined if b.Config.Bundle.Git.Commit == "" { commit, err := repo.LatestCommit() diff --git a/bundle/tests/autoload_git_test.go b/bundle/tests/autoload_git_test.go deleted file mode 100644 index a1075198f..000000000 --- a/bundle/tests/autoload_git_test.go +++ /dev/null @@ -1,20 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestAutoLoad(t *testing.T) { - b := load(t, "./autoload_git") - assert.True(t, b.Config.Bundle.Git.Inferred) - assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") -} - -func TestManuallySetBranch(t *testing.T) { - b := loadEnvironment(t, "./autoload_git", "production") - assert.False(t, b.Config.Bundle.Git.Inferred) - assert.Equal(t, "main", b.Config.Bundle.Git.Branch) - assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") -} diff --git a/bundle/tests/git_branch_validation/.mock-git/HEAD b/bundle/tests/git_branch_validation/.mock-git/HEAD new file mode 100644 index 000000000..6c83ec9df --- /dev/null +++ b/bundle/tests/git_branch_validation/.mock-git/HEAD @@ -0,0 +1 @@ +ref: refs/heads/feature-b diff --git a/bundle/tests/git_branch_validation/databricks.yml b/bundle/tests/git_branch_validation/databricks.yml new file mode 100644 index 000000000..8c7b96efc --- /dev/null +++ b/bundle/tests/git_branch_validation/databricks.yml @@ -0,0 +1,4 @@ +bundle: + name: "Dancing 
Feet" + git: + branch: "feature-a" diff --git a/bundle/tests/git_test.go b/bundle/tests/git_test.go new file mode 100644 index 000000000..daab4d30a --- /dev/null +++ b/bundle/tests/git_test.go @@ -0,0 +1,39 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/assert" +) + +func TestGitAutoLoad(t *testing.T) { + b := load(t, "./autoload_git") + assert.True(t, b.Config.Bundle.Git.Inferred) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitManuallySetBranch(t *testing.T) { + b := loadEnvironment(t, "./autoload_git", "production") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "main", b.Config.Bundle.Git.Branch) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitBundleBranchValidation(t *testing.T) { + git.GitDirectoryName = ".mock-git" + t.Cleanup(func() { + git.GitDirectoryName = ".git" + }) + + b := load(t, "./git_branch_validation") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "feature-a", b.Config.Bundle.Git.Branch) + assert.Equal(t, "feature-b", b.Config.Bundle.Git.ActualBranch) + + err := bundle.Apply(context.Background(), b, mutator.ValidateGitDetails()) + assert.ErrorContains(t, err, "not on the right Git branch:") +} diff --git a/libs/git/repository.go b/libs/git/repository.go index 3b93669ae..2f19cff98 100644 --- a/libs/git/repository.go +++ b/libs/git/repository.go @@ -12,6 +12,8 @@ import ( const gitIgnoreFileName = ".gitignore" +var GitDirectoryName = ".git" + // Repository represents a Git repository or a directory // that could later be initialized as Git repository. 
type Repository struct { @@ -45,7 +47,7 @@ func (r *Repository) Root() string { func (r *Repository) CurrentBranch() (string, error) { // load .git/HEAD - ref, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", "HEAD")) + ref, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, "HEAD")) if err != nil { return "", err } @@ -62,7 +64,7 @@ func (r *Repository) CurrentBranch() (string, error) { func (r *Repository) LatestCommit() (string, error) { // load .git/HEAD - ref, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", "HEAD")) + ref, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, "HEAD")) if err != nil { return "", err } @@ -81,7 +83,7 @@ func (r *Repository) LatestCommit() (string, error) { if err != nil { return "", err } - branchHeadRef, err := LoadReferenceFile(filepath.Join(r.rootPath, ".git", branchHeadPath)) + branchHeadRef, err := LoadReferenceFile(filepath.Join(r.rootPath, GitDirectoryName, branchHeadPath)) if err != nil { return "", err } @@ -186,7 +188,7 @@ func NewRepository(path string) (*Repository, error) { } real := true - rootPath, err := folders.FindDirWithLeaf(path, ".git") + rootPath, err := folders.FindDirWithLeaf(path, GitDirectoryName) if err != nil { if !os.IsNotExist(err) { return nil, err From ee88b0be3c28ecf0be81197359cefff42b1bc849 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Aug 2023 12:09:02 +0200 Subject: [PATCH 046/139] Bump golang.org/x/term from 0.10.0 to 0.11.0 (#643) Bumps [golang.org/x/term](https://github.com/golang/term) from 0.10.0 to 0.11.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/term&package-manager=go_modules&previous-version=0.10.0&new-version=0.11.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 8f4051e1e..5b01ec5cc 100644 --- a/go.mod +++ b/go.mod @@ -26,7 +26,7 @@ require ( golang.org/x/mod v0.12.0 golang.org/x/oauth2 v0.10.0 golang.org/x/sync v0.3.0 - golang.org/x/term v0.10.0 + golang.org/x/term v0.11.0 golang.org/x/text v0.11.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 ) @@ -52,7 +52,7 @@ require ( go.opencensus.io v0.24.0 // indirect golang.org/x/crypto v0.11.0 // indirect golang.org/x/net v0.12.0 // indirect - golang.org/x/sys v0.10.0 // indirect + golang.org/x/sys v0.11.0 // indirect golang.org/x/time v0.3.0 // indirect google.golang.org/api v0.131.0 // indirect google.golang.org/appengine v1.6.7 // indirect diff --git a/go.sum b/go.sum index 38a551088..ff272354d 100644 --- a/go.sum +++ b/go.sum @@ -217,12 +217,12 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA= -golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM= +golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c= -golang.org/x/term v0.10.0/go.mod 
h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o= +golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0= +golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= From a3de441fd28e3c4eaa400607bc5f43875b6de9bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Aug 2023 13:59:33 +0200 Subject: [PATCH 047/139] Bump golang.org/x/text from 0.11.0 to 0.12.0 (#642) Bumps [golang.org/x/text](https://github.com/golang/text) from 0.11.0 to 0.12.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/text&package-manager=go_modules&previous-version=0.11.0&new-version=0.12.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 5b01ec5cc..d9d0a3a37 100644 --- a/go.mod +++ b/go.mod @@ -27,7 +27,7 @@ require ( golang.org/x/oauth2 v0.10.0 golang.org/x/sync v0.3.0 golang.org/x/term v0.11.0 - golang.org/x/text v0.11.0 + golang.org/x/text v0.12.0 gopkg.in/ini.v1 v1.67.0 // Apache 2.0 ) diff --git a/go.sum b/go.sum index ff272354d..7af9032e5 100644 --- a/go.sum +++ b/go.sum @@ -229,8 +229,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ= -golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4= -golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= +golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc= +golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4= golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= From 54a1bcd10afaa65eff1a5c8b910950e9baf2ded6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 8 Aug 2023 12:17:48 +0000 Subject: [PATCH 048/139] Bump golang.org/x/oauth2 from 0.10.0 to 0.11.0 (#641) Bumps [golang.org/x/oauth2](https://github.com/golang/oauth2) from 0.10.0 to 0.11.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=golang.org/x/oauth2&package-manager=go_modules&previous-version=0.10.0&new-version=0.11.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 6 +++--- go.sum | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/go.mod b/go.mod index d9d0a3a37..c3efa91b2 100644 --- a/go.mod +++ b/go.mod @@ -24,7 +24,7 @@ require ( github.com/whilp/git-urls v1.0.0 // MIT golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0 golang.org/x/mod v0.12.0 - golang.org/x/oauth2 v0.10.0 + golang.org/x/oauth2 v0.11.0 golang.org/x/sync v0.3.0 golang.org/x/term v0.11.0 golang.org/x/text v0.12.0 @@ -50,8 +50,8 @@ require ( github.com/pmezard/go-difflib v1.0.0 // indirect github.com/zclconf/go-cty v1.13.2 // indirect go.opencensus.io v0.24.0 // indirect - golang.org/x/crypto v0.11.0 // indirect - golang.org/x/net v0.12.0 // indirect + golang.org/x/crypto v0.12.0 // indirect + golang.org/x/net v0.14.0 // indirect golang.org/x/sys v0.11.0 // indirect golang.org/x/time v0.3.0 // indirect google.golang.org/api v0.131.0 // indirect diff --git a/go.sum b/go.sum index 7af9032e5..1edb3b48d 100644 --- a/go.sum +++ b/go.sum @@ -163,8 +163,8 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= -golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA= -golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio= +golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk= +golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp 
v0.0.0-20230310171629-522b1b587ee0 h1:LGJsf5LRplCck6jUCH3dBL2dmycNruWNF5xugkSlfXw= golang.org/x/exp v0.0.0-20230310171629-522b1b587ee0/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc= @@ -187,12 +187,12 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50= -golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA= +golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14= +golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8= -golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI= +golang.org/x/oauth2 v0.11.0 h1:vPL4xzxBM4niKCW6g9whtaWVXTJf1U5e4aZxxFx/gbU= +golang.org/x/oauth2 v0.11.0/go.mod h1:LdF7O/8bLR/qWK9DrpXmbHLTouvRHK0SgJl0GmDBchk= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= From 6430d2345395c859ffab614baf28b19139ac0a6b Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Wed, 9 Aug 2023 11:22:42 +0200 Subject: [PATCH 049/139] 
Print y/n options when displaying prompts using cmdio.Ask (#650) ## Changes Adds `[y/n]` in `cmdio.Ask` to make the options obvious in all question prompts ## Tests Test manually. Works. --- bundle/deploy/files/delete.go | 2 +- bundle/deploy/terraform/destroy.go | 2 +- libs/cmdio/logger.go | 2 ++ 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/bundle/deploy/files/delete.go b/bundle/deploy/files/delete.go index 1f103bbd0..990eca47a 100644 --- a/bundle/deploy/files/delete.go +++ b/bundle/deploy/files/delete.go @@ -27,7 +27,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { red := color.New(color.FgRed).SprintFunc() if !b.AutoApprove { - proceed, err := cmdio.Ask(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?: ", b.Config.Workspace.RootPath, red("deleted permanently!"))) + proceed, err := cmdio.Ask(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) if err != nil { return err } diff --git a/bundle/deploy/terraform/destroy.go b/bundle/deploy/terraform/destroy.go index 839ea5f9c..649542f6f 100644 --- a/bundle/deploy/terraform/destroy.go +++ b/bundle/deploy/terraform/destroy.go @@ -89,7 +89,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { // Ask for confirmation, if needed if !b.Plan.ConfirmApply { red := color.New(color.FgRed).SprintFunc() - b.Plan.ConfirmApply, err = cmdio.Ask(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed? [y/n]: ", red("destroy"))) + b.Plan.ConfirmApply, err = cmdio.Ask(ctx, fmt.Sprintf("\nThis will permanently %s resources! 
Proceed?", red("destroy"))) if err != nil { return err } diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index a507c5cce..3190a6a79 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -87,6 +87,8 @@ func (l *Logger) Ask(question string) (bool, error) { return false, fmt.Errorf("question prompts are not supported in json mode") } + // Add acceptable answers to the question prompt. + question += ` [y/n]:` l.Writer.Write([]byte(question)) ans, err := l.Reader.ReadString('\n') From 979b680c50d852ff299070992cd686082d7a0bf4 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 10 Aug 2023 11:22:38 +0200 Subject: [PATCH 050/139] Release v0.203.0 (#653) CLI: * Infer host from profile during `auth login` ([#629](https://github.com/databricks/cli/pull/629)). Bundles: * Extend deployment mode support ([#577](https://github.com/databricks/cli/pull/577)). * Add validation for Git settings in bundles ([#578](https://github.com/databricks/cli/pull/578)). * Only treat files with .tmpl extension as templates ([#594](https://github.com/databricks/cli/pull/594)). * Add JSON schema validation for input template parameters ([#598](https://github.com/databricks/cli/pull/598)). * Add DATABRICKS_BUNDLE_INCLUDE_PATHS to specify include paths through env vars ([#591](https://github.com/databricks/cli/pull/591)). * Initialise a empty default bundle if BUNDLE_ROOT and DATABRICKS_BUNDLE_INCLUDES env vars are present ([#604](https://github.com/databricks/cli/pull/604)). * Regenerate bundle resource structs from latest Terraform provider ([#633](https://github.com/databricks/cli/pull/633)). * Fixed processing jobs libraries with remote path ([#638](https://github.com/databricks/cli/pull/638)). * Add unit test for file name execution during rendering ([#640](https://github.com/databricks/cli/pull/640)). * Add bundle init command and support for prompting user for input values ([#631](https://github.com/databricks/cli/pull/631)). 
* Fix bundle git branch validation ([#645](https://github.com/databricks/cli/pull/645)). Internal: * Fix mkdir integration test on GCP ([#620](https://github.com/databricks/cli/pull/620)). * Fix git clone integration test for non-existing repo ([#610](https://github.com/databricks/cli/pull/610)). * Remove push to main trigger for build workflow ([#621](https://github.com/databricks/cli/pull/621)). * Remove workflow to publish binaries to S3 ([#622](https://github.com/databricks/cli/pull/622)). * Fix failing fs mkdir test on azure ([#627](https://github.com/databricks/cli/pull/627)). * Print y/n options when displaying prompts using cmdio.Ask ([#650](https://github.com/databricks/cli/pull/650)). API Changes: * Changed `databricks account metastore-assignments create` command to not return anything. * Added `databricks account network-policy` command group. OpenAPI commit 7b57ba3a53f4de3d049b6a24391fe5474212daf8 (2023-07-28) Dependency updates: * Bump OpenAPI specification & Go SDK Version ([#624](https://github.com/databricks/cli/pull/624)). * Bump golang.org/x/term from 0.10.0 to 0.11.0 ([#643](https://github.com/databricks/cli/pull/643)). * Bump golang.org/x/text from 0.11.0 to 0.12.0 ([#642](https://github.com/databricks/cli/pull/642)). * Bump golang.org/x/oauth2 from 0.10.0 to 0.11.0 ([#641](https://github.com/databricks/cli/pull/641)). --- CHANGELOG.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index f0b1f6968..6cf7673b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,43 @@ # Version changelog +## 0.203.0 + +CLI: + * Infer host from profile during `auth login` ([#629](https://github.com/databricks/cli/pull/629)). + +Bundles: + * Extend deployment mode support ([#577](https://github.com/databricks/cli/pull/577)). + * Add validation for Git settings in bundles ([#578](https://github.com/databricks/cli/pull/578)). 
+ * Only treat files with .tmpl extension as templates ([#594](https://github.com/databricks/cli/pull/594)). + * Add JSON schema validation for input template parameters ([#598](https://github.com/databricks/cli/pull/598)). + * Add DATABRICKS_BUNDLE_INCLUDE_PATHS to specify include paths through env vars ([#591](https://github.com/databricks/cli/pull/591)). + * Initialise a empty default bundle if BUNDLE_ROOT and DATABRICKS_BUNDLE_INCLUDES env vars are present ([#604](https://github.com/databricks/cli/pull/604)). + * Regenerate bundle resource structs from latest Terraform provider ([#633](https://github.com/databricks/cli/pull/633)). + * Fixed processing jobs libraries with remote path ([#638](https://github.com/databricks/cli/pull/638)). + * Add unit test for file name execution during rendering ([#640](https://github.com/databricks/cli/pull/640)). + * Add bundle init command and support for prompting user for input values ([#631](https://github.com/databricks/cli/pull/631)). + * Fix bundle git branch validation ([#645](https://github.com/databricks/cli/pull/645)). + +Internal: + * Fix mkdir integration test on GCP ([#620](https://github.com/databricks/cli/pull/620)). + * Fix git clone integration test for non-existing repo ([#610](https://github.com/databricks/cli/pull/610)). + * Remove push to main trigger for build workflow ([#621](https://github.com/databricks/cli/pull/621)). + * Remove workflow to publish binaries to S3 ([#622](https://github.com/databricks/cli/pull/622)). + * Fix failing fs mkdir test on azure ([#627](https://github.com/databricks/cli/pull/627)). + * Print y/n options when displaying prompts using cmdio.Ask ([#650](https://github.com/databricks/cli/pull/650)). + +API Changes: + * Changed `databricks account metastore-assignments create` command to not return anything. + * Added `databricks account network-policy` command group. 
+ +OpenAPI commit 7b57ba3a53f4de3d049b6a24391fe5474212daf8 (2023-07-28) + +Dependency updates: + * Bump OpenAPI specification & Go SDK Version ([#624](https://github.com/databricks/cli/pull/624)). + * Bump golang.org/x/term from 0.10.0 to 0.11.0 ([#643](https://github.com/databricks/cli/pull/643)). + * Bump golang.org/x/text from 0.11.0 to 0.12.0 ([#642](https://github.com/databricks/cli/pull/642)). + * Bump golang.org/x/oauth2 from 0.10.0 to 0.11.0 ([#641](https://github.com/databricks/cli/pull/641)). + ## 0.202.0 Breaking Change: From 2a58253d20ad6d48c55f138f0c92f62544fa7c28 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 10 Aug 2023 11:36:42 +0200 Subject: [PATCH 051/139] Consolidate functions in libs/git (#652) ## Changes The functions in `libs/git/git.go` assumed global state (e.g. working directory) and were no longer used. This change consolidates the functionality to turn an origin URL into an HTTPS URL. Closes #187. ## Tests Expanded existing unit test. --- libs/git/git.go | 80 ------------------------------------------ libs/git/git_test.go | 22 ------------ libs/git/utils.go | 24 ++++++------- libs/git/utils_test.go | 15 ++++++-- 4 files changed, 24 insertions(+), 117 deletions(-) delete mode 100644 libs/git/git.go delete mode 100644 libs/git/git_test.go diff --git a/libs/git/git.go b/libs/git/git.go deleted file mode 100644 index c5d09034c..000000000 --- a/libs/git/git.go +++ /dev/null @@ -1,80 +0,0 @@ -package git - -import ( - "fmt" - "net/url" - "os" - "path" - "strings" - - "github.com/databricks/cli/folders" - giturls "github.com/whilp/git-urls" - "gopkg.in/ini.v1" -) - -func Root() (string, error) { - wd, err := os.Getwd() - if err != nil { - return "", err - } - return folders.FindDirWithLeaf(wd, ".git") -} - -// Origin finds the git repository the project is cloned from, so that -// we could automatically verify if this project is checked out in repos -// home folder of the user according to recommended best practices. 
Can -// also be used to determine a good enough default project name. -func Origin() (*url.URL, error) { - root, err := Root() - if err != nil { - return nil, err - } - file := fmt.Sprintf("%s/.git/config", root) - gitConfig, err := ini.Load(file) - if err != nil { - return nil, err - } - section := gitConfig.Section(`remote "origin"`) - if section == nil { - return nil, fmt.Errorf("remote `origin` is not defined in %s", file) - } - url := section.Key("url") - if url == nil { - return nil, fmt.Errorf("git origin url is not defined") - } - return giturls.Parse(url.Value()) -} - -// HttpsOrigin returns URL in the format expected by Databricks Repos -// platform functionality. Gradually expand implementation to work with -// other formats of git URLs. -func HttpsOrigin() (string, error) { - origin, err := Origin() - if err != nil { - return "", err - } - // if current repo is checked out with a SSH key - if origin.Scheme != "https" { - origin.Scheme = "https" - } - // `git@` is not required for HTTPS, as Databricks Repos are checked - // out using an API token instead of username. But does it hold true - // for all of the git implementations? - if origin.User != nil { - origin.User = nil - } - // Remove `.git` suffix, if present. - origin.Path = strings.TrimSuffix(origin.Path, ".git") - return origin.String(), nil -} - -// RepositoryName returns repository name as last path entry from detected -// git repository up the tree or returns error if it fails to do so. 
-func RepositoryName() (string, error) { - origin, err := Origin() - if err != nil { - return "", err - } - base := path.Base(origin.Path) - return strings.TrimSuffix(base, ".git"), nil -} diff --git a/libs/git/git_test.go b/libs/git/git_test.go deleted file mode 100644 index 818ba8421..000000000 --- a/libs/git/git_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package git - -import ( - "strings" - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestGetGitOrigin(t *testing.T) { - this, err := RepositoryName() - assert.NoError(t, err) - assert.Equal(t, "cli", this) -} - -func TestHttpsOrigin(t *testing.T) { - url, err := HttpsOrigin() - assert.NoError(t, err) - // must pass on the upcoming forks - assert.True(t, strings.HasPrefix(url, "https://github.com"), url) - assert.True(t, strings.HasSuffix(url, "cli"), url) -} diff --git a/libs/git/utils.go b/libs/git/utils.go index 13ce2c9e6..1d38da3aa 100644 --- a/libs/git/utils.go +++ b/libs/git/utils.go @@ -6,23 +6,23 @@ import ( giturls "github.com/whilp/git-urls" ) +// Return an origin URL as an HTTPS URL. +// The transformations in this function are not guaranteed to work for all +// Git providers. They are only guaranteed to work for GitHub. func ToHttpsUrl(url string) (string, error) { - originUrl, err := giturls.Parse(url) + origin, err := giturls.Parse(url) if err != nil { return "", err } - if originUrl.Scheme == "https" { - return originUrl.String(), nil + // If this repository is checked out over SSH + if origin.Scheme != "https" { + origin.Scheme = "https" } - // if current repo is checked out with a SSH key - if originUrl.Scheme != "https" { - originUrl.Scheme = "https" - } - // `git@` is not required for HTTPS - if originUrl.User != nil { - originUrl.User = nil + // Basic auth is not applicable for an HTTPS URL. + if origin.User != nil { + origin.User = nil } // Remove `.git` suffix, if present. 
- originUrl.Path = strings.TrimSuffix(originUrl.Path, ".git") - return originUrl.String(), nil + origin.Path = strings.TrimSuffix(origin.Path, ".git") + return origin.String(), nil } diff --git a/libs/git/utils_test.go b/libs/git/utils_test.go index 52a912da5..2a77cae16 100644 --- a/libs/git/utils_test.go +++ b/libs/git/utils_test.go @@ -7,7 +7,16 @@ import ( ) func TestToHttpsUrlForSsh(t *testing.T) { - url, err := ToHttpsUrl("user@foo.com:org/repo-name.git") - assert.NoError(t, err) - assert.Equal(t, "https://foo.com/org/repo-name", url) + for _, e := range []struct { + url string + expected string + }{ + {"user@foo.com:org/repo-name.git", "https://foo.com/org/repo-name"}, + {"git@github.com:databricks/cli.git", "https://github.com/databricks/cli"}, + {"https://github.com/databricks/cli.git", "https://github.com/databricks/cli"}, + } { + url, err := ToHttpsUrl(e.url) + assert.NoError(t, err) + assert.Equal(t, e.expected, url) + } } From 6b615ccfb46551d565e70aeff1294b1d24c5b3bb Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 10 Aug 2023 12:03:52 +0200 Subject: [PATCH 052/139] Add internal tag for bundle fields to be skipped from schema (#636) ## Changes This PR: 1. Introduces the "internal" tag to bundle configs that should not be visible to customers. 2. Annotates "metadata_service_url" as an internal field. ## Tests Unit tests. 
--- bundle/config/workspace.go | 2 +- bundle/schema/schema.go | 10 ++++++- bundle/schema/schema_test.go | 52 ++++++++++++++++++++++++++++++++++++ 3 files changed, 62 insertions(+), 2 deletions(-) diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index f278ea179..bd116a9cb 100644 --- a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -24,7 +24,7 @@ type Workspace struct { Host string `json:"host,omitempty"` Profile string `json:"profile,omitempty"` AuthType string `json:"auth_type,omitempty"` - MetadataServiceURL string `json:"metadata_service_url,omitempty"` + MetadataServiceURL string `json:"metadata_service_url,omitempty" bundle:"internal"` // OAuth specific attributes. ClientID string `json:"client_id,omitempty"` diff --git a/bundle/schema/schema.go b/bundle/schema/schema.go index fee9b676a..00dd27192 100644 --- a/bundle/schema/schema.go +++ b/bundle/schema/schema.go @@ -9,6 +9,14 @@ import ( "github.com/databricks/cli/libs/jsonschema" ) +// Fields tagged "readonly" should not be emitted in the schema as they are +// computed at runtime, and should not be assigned a value by the bundle author. +const readonlyTag = "readonly" + +// Annotation for internal bundle fields that should not be exposed to customers. +// Fields can be tagged as "internal" to remove them from the generated schema. +const internalTag = "internal" + // This function translates golang types into json schema. 
Here is the mapping // between json schema types and golang types // @@ -197,7 +205,7 @@ func toSchema(golangType reflect.Type, docs *Docs, tracker *tracker) (*jsonschem required := []string{} for _, child := range children { bundleTag := child.Tag.Get("bundle") - if bundleTag == "readonly" { + if bundleTag == readonlyTag || bundleTag == internalTag { continue } diff --git a/bundle/schema/schema_test.go b/bundle/schema/schema_test.go index 66baf8736..d44a2082a 100644 --- a/bundle/schema/schema_test.go +++ b/bundle/schema/schema_test.go @@ -1462,3 +1462,55 @@ func TestBundleReadOnlytag(t *testing.T) { t.Log("[DEBUG] expected: ", expected) assert.Equal(t, expected, string(jsonSchema)) } + +func TestBundleInternalTag(t *testing.T) { + type Pokemon struct { + Pikachu string `json:"pikachu" bundle:"internal"` + Raichu string `json:"raichu"` + } + + type Foo struct { + Pokemon *Pokemon `json:"pokemon"` + Apple int `json:"apple"` + Mango string `json:"mango" bundle:"internal"` + } + + elem := Foo{} + + schema, err := New(reflect.TypeOf(elem), nil) + assert.NoError(t, err) + + jsonSchema, err := json.MarshalIndent(schema, " ", " ") + assert.NoError(t, err) + + expected := + `{ + "type": "object", + "properties": { + "apple": { + "type": "number" + }, + "pokemon": { + "type": "object", + "properties": { + "raichu": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "raichu" + ] + } + }, + "additionalProperties": false, + "required": [ + "pokemon", + "apple" + ] + }` + + t.Log("[DEBUG] actual: ", string(jsonSchema)) + t.Log("[DEBUG] expected: ", expected) + assert.Equal(t, expected, string(jsonSchema)) +} From 8656c4a1fad95f349a778b18c73303df6754632f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Fri, 11 Aug 2023 14:28:05 +0200 Subject: [PATCH 053/139] Log the bundle root configuration file if applicable (#657) ## Changes Pass through the `context.Context` to the bundle loader functions. ## Tests Unit tests pass. 
--- bundle/bundle.go | 14 ++++++++----- bundle/bundle_test.go | 21 ++++++++++--------- bundle/root_test.go | 9 ++++---- bundle/tests/bundle/wheel_test.go | 21 +++++++++++-------- bundle/tests/conflicting_resource_ids_test.go | 13 +++++++----- bundle/tests/include_test.go | 5 +++-- bundle/tests/loader.go | 5 +++-- cmd/root/bundle.go | 9 ++++---- cmd/root/bundle_test.go | 2 +- 9 files changed, 57 insertions(+), 42 deletions(-) diff --git a/bundle/bundle.go b/bundle/bundle.go index 0147883ca..06c68fe8a 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -7,6 +7,7 @@ package bundle import ( + "context" "fmt" "os" "path/filepath" @@ -16,6 +17,7 @@ import ( "github.com/databricks/cli/folders" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/locker" + "github.com/databricks/cli/libs/log" "github.com/databricks/cli/libs/terraform" "github.com/databricks/databricks-sdk-go" sdkconfig "github.com/databricks/databricks-sdk-go/config" @@ -45,7 +47,7 @@ type Bundle struct { const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES" -func Load(path string) (*Bundle, error) { +func Load(ctx context.Context, path string) (*Bundle, error) { bundle := &Bundle{} stat, err := os.Stat(path) if err != nil { @@ -56,6 +58,7 @@ func Load(path string) (*Bundle, error) { _, hasIncludePathEnv := os.LookupEnv(ExtraIncludePathsKey) _, hasBundleRootEnv := os.LookupEnv(envBundleRoot) if hasIncludePathEnv && hasBundleRootEnv && stat.IsDir() { + log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) bundle.Config = config.Root{ Path: path, Bundle: config.Bundle{ @@ -66,6 +69,7 @@ func Load(path string) (*Bundle, error) { } return nil, err } + log.Debugf(ctx, "Loading bundle configuration from: %s", configFile) err = bundle.Config.Load(configFile) if err != nil { return nil, err @@ -75,19 +79,19 @@ func Load(path string) (*Bundle, error) { // MustLoad returns a bundle configuration. // It returns an error if a bundle was not found or could not be loaded. 
-func MustLoad() (*Bundle, error) { +func MustLoad(ctx context.Context) (*Bundle, error) { root, err := mustGetRoot() if err != nil { return nil, err } - return Load(root) + return Load(ctx, root) } // TryLoad returns a bundle configuration if there is one, but doesn't fail if there isn't one. // It returns an error if a bundle was found but could not be loaded. // It returns a `nil` bundle if a bundle was not found. -func TryLoad() (*Bundle, error) { +func TryLoad(ctx context.Context) (*Bundle, error) { root, err := tryGetRoot() if err != nil { return nil, err @@ -98,7 +102,7 @@ func TryLoad() (*Bundle, error) { return nil, nil } - return Load(root) + return Load(ctx, root) } func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient { diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index 18550f4f2..ac9475000 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -1,6 +1,7 @@ package bundle import ( + "context" "os" "path/filepath" "testing" @@ -10,13 +11,13 @@ import ( ) func TestLoadNotExists(t *testing.T) { - b, err := Load("/doesntexist") + b, err := Load(context.Background(), "/doesntexist") assert.True(t, os.IsNotExist(err)) assert.Nil(t, b) } func TestLoadExists(t *testing.T) { - b, err := Load("./tests/basic") + b, err := Load(context.Background(), "./tests/basic") require.Nil(t, err) assert.Equal(t, "basic", b.Config.Bundle.Name) } @@ -27,7 +28,7 @@ func TestBundleCacheDir(t *testing.T) { require.NoError(t, err) f1.Close() - bundle, err := Load(projectDir) + bundle, err := Load(context.Background(), projectDir) require.NoError(t, err) // Artificially set environment. @@ -51,7 +52,7 @@ func TestBundleCacheDirOverride(t *testing.T) { require.NoError(t, err) f1.Close() - bundle, err := Load(projectDir) + bundle, err := Load(context.Background(), projectDir) require.NoError(t, err) // Artificially set environment. 
@@ -70,39 +71,39 @@ func TestBundleCacheDirOverride(t *testing.T) { func TestBundleMustLoadSuccess(t *testing.T) { t.Setenv(envBundleRoot, "./tests/basic") - b, err := MustLoad() + b, err := MustLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleMustLoadFailureWithEnv(t *testing.T) { t.Setenv(envBundleRoot, "./tests/doesntexist") - _, err := MustLoad() + _, err := MustLoad(context.Background()) require.Error(t, err, "not a directory") } func TestBundleMustLoadFailureIfNotFound(t *testing.T) { chdir(t, t.TempDir()) - _, err := MustLoad() + _, err := MustLoad(context.Background()) require.Error(t, err, "unable to find bundle root") } func TestBundleTryLoadSuccess(t *testing.T) { t.Setenv(envBundleRoot, "./tests/basic") - b, err := TryLoad() + b, err := TryLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleTryLoadFailureWithEnv(t *testing.T) { t.Setenv(envBundleRoot, "./tests/doesntexist") - _, err := TryLoad() + _, err := TryLoad(context.Background()) require.Error(t, err, "not a directory") } func TestBundleTryLoadOkIfNotFound(t *testing.T) { chdir(t, t.TempDir()) - b, err := TryLoad() + b, err := TryLoad(context.Background()) assert.NoError(t, err) assert.Nil(t, b) } diff --git a/bundle/root_test.go b/bundle/root_test.go index e85c4fdcb..0c4c46aaf 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -1,6 +1,7 @@ package bundle import ( + "context" "os" "path/filepath" "testing" @@ -108,7 +109,7 @@ func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { chdir(t, filepath.Join(".", "tests", "basic")) t.Setenv(ExtraIncludePathsKey, "test") - bundle, err := MustLoad() + bundle, err := MustLoad(context.Background()) assert.NoError(t, err) assert.Equal(t, "basic", bundle.Config.Bundle.Name) @@ -123,7 +124,7 @@ func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { 
t.Setenv(envBundleRoot, dir) t.Setenv(ExtraIncludePathsKey, "test") - bundle, err := MustLoad() + bundle, err := MustLoad(context.Background()) assert.NoError(t, err) assert.Equal(t, dir, bundle.Config.Path) } @@ -133,7 +134,7 @@ func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) { chdir(t, dir) t.Setenv(ExtraIncludePathsKey, "test") - _, err := MustLoad() + _, err := MustLoad(context.Background()) assert.Error(t, err) } @@ -142,6 +143,6 @@ func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) { chdir(t, dir) t.Setenv(envBundleRoot, dir) - _, err := MustLoad() + _, err := MustLoad(context.Background()) assert.Error(t, err) } diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go index bfc1fa04a..ee7457735 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -12,11 +12,12 @@ import ( ) func TestBundlePythonWheelBuild(t *testing.T) { - b, err := bundle.Load("./python_wheel") + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel") require.NoError(t, err) m := phases.Build() - err = m.Apply(context.Background(), b) + err = m.Apply(ctx, b) require.NoError(t, err) matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -24,16 +25,17 @@ func TestBundlePythonWheelBuild(t *testing.T) { require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = match.Apply(context.Background(), b) + err = match.Apply(ctx, b) require.NoError(t, err) } func TestBundlePythonWheelBuildAutoDetect(t *testing.T) { - b, err := bundle.Load("./python_wheel_no_artifact") + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_no_artifact") require.NoError(t, err) m := phases.Build() - err = m.Apply(context.Background(), b) + err = m.Apply(ctx, b) require.NoError(t, err) matches, err := filepath.Glob("python_wheel/my_test_code/dist/my_test_code-*.whl") @@ -41,19 +43,20 @@ func TestBundlePythonWheelBuildAutoDetect(t 
*testing.T) { require.Equal(t, 1, len(matches)) match := libraries.MatchWithArtifacts() - err = match.Apply(context.Background(), b) + err = match.Apply(ctx, b) require.NoError(t, err) } func TestBundlePythonWheelWithDBFSLib(t *testing.T) { - b, err := bundle.Load("./python_wheel_dbfs_lib") + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_dbfs_lib") require.NoError(t, err) m := phases.Build() - err = m.Apply(context.Background(), b) + err = m.Apply(ctx, b) require.NoError(t, err) match := libraries.MatchWithArtifacts() - err = match.Apply(context.Background(), b) + err = match.Apply(ctx, b) require.NoError(t, err) } diff --git a/bundle/tests/conflicting_resource_ids_test.go b/bundle/tests/conflicting_resource_ids_test.go index b75e3753f..704683ad5 100644 --- a/bundle/tests/conflicting_resource_ids_test.go +++ b/bundle/tests/conflicting_resource_ids_test.go @@ -13,24 +13,27 @@ import ( ) func TestConflictingResourceIdsNoSubconfig(t *testing.T) { - _, err := bundle.Load("./conflicting_resource_ids/no_subconfigurations") + ctx := context.Background() + _, err := bundle.Load(ctx, "./conflicting_resource_ids/no_subconfigurations") bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/no_subconfigurations/databricks.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, bundleConfigPath)) } func TestConflictingResourceIdsOneSubconfig(t *testing.T) { - b, err := bundle.Load("./conflicting_resource_ids/one_subconfiguration") + ctx := context.Background() + b, err := bundle.Load(ctx, "./conflicting_resource_ids/one_subconfiguration") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) bundleConfigPath := filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/databricks.yml") resourcesConfigPath := 
filepath.FromSlash("conflicting_resource_ids/one_subconfiguration/resources.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", bundleConfigPath, resourcesConfigPath)) } func TestConflictingResourceIdsTwoSubconfigs(t *testing.T) { - b, err := bundle.Load("./conflicting_resource_ids/two_subconfigurations") + ctx := context.Background() + b, err := bundle.Load(ctx, "./conflicting_resource_ids/two_subconfigurations") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) resources1ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources1.yml") resources2ConfigPath := filepath.FromSlash("conflicting_resource_ids/two_subconfigurations/resources2.yml") assert.ErrorContains(t, err, fmt.Sprintf("multiple resources named foo (job at %s, pipeline at %s)", resources1ConfigPath, resources2ConfigPath)) diff --git a/bundle/tests/include_test.go b/bundle/tests/include_test.go index 00aecb9fd..eb09d1aa0 100644 --- a/bundle/tests/include_test.go +++ b/bundle/tests/include_test.go @@ -14,9 +14,10 @@ import ( ) func TestIncludeInvalid(t *testing.T) { - b, err := bundle.Load("./include_invalid") + ctx := context.Background() + b, err := bundle.Load(ctx, "./include_invalid") require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) require.Error(t, err) assert.Contains(t, err.Error(), "notexists.yml defined in 'include' section does not match any files") } diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 42f1fc5be..056a82d91 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -10,9 +10,10 @@ import ( ) func load(t *testing.T, path string) *bundle.Bundle { - b, err := bundle.Load(path) + ctx := context.Background() + 
b, err := bundle.Load(ctx, path) require.NoError(t, err) - err = bundle.Apply(context.Background(), b, bundle.Seq(mutator.DefaultMutators()...)) + err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) require.NoError(t, err) return b } diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 8a3b5977f..f691bbfc2 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -1,6 +1,7 @@ package root import ( + "context" "os" "github.com/databricks/cli/bundle" @@ -41,8 +42,9 @@ func getProfile(cmd *cobra.Command) (value string) { } // loadBundle loads the bundle configuration and applies default mutators. -func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, error)) (*bundle.Bundle, error) { - b, err := load() +func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) (*bundle.Bundle, error) { + ctx := cmd.Context() + b, err := load(ctx) if err != nil { return nil, err } @@ -57,7 +59,6 @@ func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, b.Config.Workspace.Profile = profile } - ctx := cmd.Context() err = bundle.Apply(ctx, b, bundle.Seq(mutator.DefaultMutators()...)) if err != nil { return nil, err @@ -67,7 +68,7 @@ func loadBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, } // configureBundle loads the bundle configuration and configures it on the command's context. 
-func configureBundle(cmd *cobra.Command, args []string, load func() (*bundle.Bundle, error)) error { +func configureBundle(cmd *cobra.Command, args []string, load func(ctx context.Context) (*bundle.Bundle, error)) error { b, err := loadBundle(cmd, args, load) if err != nil { return err diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 4b44e019b..4382cf22f 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -39,7 +39,7 @@ func emptyCommand(t *testing.T) *cobra.Command { func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { setupDatabricksCfg(t) - err := configureBundle(cmd, []string{"validate"}, func() (*bundle.Bundle, error) { + err := configureBundle(cmd, []string{"validate"}, func(_ context.Context) (*bundle.Bundle, error) { return &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ From 6ea70c82a93805a90a112f916c21c0659947b272 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 11 Aug 2023 15:48:32 +0200 Subject: [PATCH 054/139] Execute paths without the .tmpl extension as templates (#654) ## Changes The `.tmpl` extension is only meant as a qualifier for whether the file content is executed as a template. All file paths in the `template` directory should be treated as valid go text templates. Before only paths with the `.tmpl` extensions would be resolved as templates, after this change, all file paths are interpreted as templates. ## Tests Unit test. The newly added unit tests also asserts that the file path is correct, even when the `.tmpl` extension is missing. 
--- libs/template/renderer.go | 19 +++++++++++-------- libs/template/renderer_test.go | 14 ++++++++++++++ .../template-in-path/library/my_funcs.tmpl | 7 +++++++ .../{{template `file_name`}} | 0 4 files changed, 32 insertions(+), 8 deletions(-) create mode 100644 libs/template/testdata/template-in-path/library/my_funcs.tmpl create mode 100644 libs/template/testdata/template-in-path/template/{{template `dir_name`}}/{{template `file_name`}} diff --git a/libs/template/renderer.go b/libs/template/renderer.go index c7e79841c..76479c05d 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -124,19 +124,29 @@ func (r *renderer) computeFile(relPathTemplate string) (file, error) { } perm := info.Mode().Perm() + // Execute relative path template to get destination path for the file + relPath, err := r.executeTemplate(relPathTemplate) + if err != nil { + return nil, err + } + // If file name does not specify the `.tmpl` extension, then it is copied // over as is, without treating it as a template if !strings.HasSuffix(relPathTemplate, templateExtension) { return ©File{ dstPath: &destinationPath{ root: r.instanceRoot, - relPath: relPathTemplate, + relPath: relPath, }, perm: perm, ctx: r.ctx, srcPath: relPathTemplate, srcFiler: r.templateFiler, }, nil + } else { + // Trim the .tmpl suffix from file name, if specified in the template + // path + relPath = strings.TrimSuffix(relPath, templateExtension) } // read template file's content @@ -160,13 +170,6 @@ func (r *renderer) computeFile(relPathTemplate string) (file, error) { return nil, fmt.Errorf("failed to compute file content for %s. 
%w", relPathTemplate, err) } - // Execute relative path template to get materialized path for the file - relPathTemplate = strings.TrimSuffix(relPathTemplate, templateExtension) - relPath, err := r.executeTemplate(relPathTemplate) - if err != nil { - return nil, err - } - return &inMemoryFile{ dstPath: &destinationPath{ root: r.instanceRoot, diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index 37b94b1ee..f3f7f2345 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -459,3 +459,17 @@ func TestRendererFileTreeRendering(t *testing.T) { assert.DirExists(t, filepath.Join(tmpDir, "my_directory")) assert.FileExists(t, filepath.Join(tmpDir, "my_directory", "my_file")) } + +func TestRendererSubTemplateInPath(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/template-in-path/template", "./testdata/template-in-path/library", tmpDir) + require.NoError(t, err) + + err = r.walk() + require.NoError(t, err) + + assert.Equal(t, filepath.Join(tmpDir, "my_directory", "my_file"), r.files[0].DstPath().absPath()) + assert.Equal(t, "my_directory/my_file", r.files[0].DstPath().relPath) +} diff --git a/libs/template/testdata/template-in-path/library/my_funcs.tmpl b/libs/template/testdata/template-in-path/library/my_funcs.tmpl new file mode 100644 index 000000000..3415ad774 --- /dev/null +++ b/libs/template/testdata/template-in-path/library/my_funcs.tmpl @@ -0,0 +1,7 @@ +{{define "dir_name" -}} +my_directory +{{- end}} + +{{define "file_name" -}} +my_file +{{- end}} diff --git a/libs/template/testdata/template-in-path/template/{{template `dir_name`}}/{{template `file_name`}} b/libs/template/testdata/template-in-path/template/{{template `dir_name`}}/{{template `file_name`}} new file mode 100644 index 000000000..e69de29bb From 97699b849fc8431741e19518531ef5e17834c201 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 14 Aug 2023 08:43:45 +0200 Subject: 
[PATCH 055/139] Enable environment overrides for job clusters (#658) ## Changes While they are a slice, we can identify a job cluster by its job cluster key. A job definition with multiple job clusters with the same key is always invalid. We can therefore merge definitions with the same key into one. This is compatible with how environment overrides are applied; merging a slice means appending to it. The override will end up in the job cluster slice of the original, which gives us a deterministic way to merge them. Since the alternative is an invalid configuration, this doesn't change behavior. ## Tests New test coverage. --- bundle/config/resources.go | 11 ++++ bundle/config/resources/job.go | 38 ++++++++++++- bundle/config/resources/job_test.go | 57 +++++++++++++++++++ bundle/config/root.go | 5 ++ .../tests/override_job_cluster/databricks.yml | 35 ++++++++++++ bundle/tests/override_job_cluster_test.go | 29 ++++++++++ 6 files changed, 174 insertions(+), 1 deletion(-) create mode 100644 bundle/config/resources/job_test.go create mode 100644 bundle/tests/override_job_cluster/databricks.yml create mode 100644 bundle/tests/override_job_cluster_test.go diff --git a/bundle/config/resources.go b/bundle/config/resources.go index fc86647eb..b15158b45 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -113,3 +113,14 @@ func (r *Resources) SetConfigFilePath(path string) { e.ConfigFilePath = path } } + +// MergeJobClusters iterates over all jobs and merges their job clusters. +// This is called after applying the environment overrides. 
+func (r *Resources) MergeJobClusters() error { + for _, job := range r.Jobs { + if err := job.MergeJobClusters(); err != nil { + return err + } + } + return nil +} diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index a1ea3855e..327d7e13e 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -1,6 +1,9 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/jobs" +import ( + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/imdario/mergo" +) type Job struct { ID string `json:"id,omitempty" bundle:"readonly"` @@ -10,3 +13,36 @@ type Job struct { *jobs.JobSettings } + +// MergeJobClusters merges job clusters with the same key. +// The job clusters field is a slice, and as such, overrides are appended to it. +// We can identify a job cluster by its key, however, so we can use this key +// to figure out which definitions are actually overrides and merge them. +func (j *Job) MergeJobClusters() error { + keys := make(map[string]*jobs.JobCluster) + output := make([]jobs.JobCluster, 0, len(j.JobClusters)) + + // Environment overrides are always appended, so we can iterate in natural order to + // first find the base definition, and merge instances we encounter later. + for i := range j.JobClusters { + key := j.JobClusters[i].JobClusterKey + + // Register job cluster with key if not yet seen before. + ref, ok := keys[key] + if !ok { + output = append(output, j.JobClusters[i]) + keys[key] = &j.JobClusters[i] + continue + } + + // Merge this instance into the reference. + err := mergo.Merge(ref, &j.JobClusters[i], mergo.WithOverride, mergo.WithAppendSlice) + if err != nil { + return err + } + } + + // Overwrite resulting slice. 
+ j.JobClusters = output + return nil +} diff --git a/bundle/config/resources/job_test.go b/bundle/config/resources/job_test.go new file mode 100644 index 000000000..2ff3205e0 --- /dev/null +++ b/bundle/config/resources/job_test.go @@ -0,0 +1,57 @@ +package resources + +import ( + "testing" + + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestJobMergeJobClusters(t *testing.T) { + j := &Job{ + JobSettings: &jobs.JobSettings{ + JobClusters: []jobs.JobCluster{ + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "13.3.x-scala2.12", + NodeTypeId: "i3.xlarge", + NumWorkers: 2, + }, + }, + { + JobClusterKey: "bar", + NewCluster: &compute.ClusterSpec{ + SparkVersion: "10.4.x-scala2.12", + }, + }, + { + JobClusterKey: "foo", + NewCluster: &compute.ClusterSpec{ + NodeTypeId: "i3.2xlarge", + NumWorkers: 4, + }, + }, + }, + }, + } + + err := j.MergeJobClusters() + require.NoError(t, err) + + assert.Len(t, j.JobClusters, 2) + assert.Equal(t, "foo", j.JobClusters[0].JobClusterKey) + assert.Equal(t, "bar", j.JobClusters[1].JobClusterKey) + + // This job cluster was merged with a subsequent one. + jc0 := j.JobClusters[0].NewCluster + assert.Equal(t, "13.3.x-scala2.12", jc0.SparkVersion) + assert.Equal(t, "i3.2xlarge", jc0.NodeTypeId) + assert.Equal(t, 4, jc0.NumWorkers) + + // This job cluster was left untouched. 
+ jc1 := j.JobClusters[1].NewCluster + assert.Equal(t, "10.4.x-scala2.12", jc1.SparkVersion) +} diff --git a/bundle/config/root.go b/bundle/config/root.go index 52f887378..4ca9d0a05 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -203,6 +203,11 @@ func (r *Root) MergeEnvironment(env *Environment) error { if err != nil { return err } + + err = r.Resources.MergeJobClusters() + if err != nil { + return err + } } if env.Variables != nil { diff --git a/bundle/tests/override_job_cluster/databricks.yml b/bundle/tests/override_job_cluster/databricks.yml new file mode 100644 index 000000000..33061b2e3 --- /dev/null +++ b/bundle/tests/override_job_cluster/databricks.yml @@ -0,0 +1,35 @@ +bundle: + name: override_job_cluster + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + foo: + name: job + job_clusters: + - job_cluster_key: key + new_cluster: + spark_version: 13.3.x-scala2.12 + +environments: + development: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.xlarge + num_workers: 1 + + staging: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.2xlarge + num_workers: 4 diff --git a/bundle/tests/override_job_cluster_test.go b/bundle/tests/override_job_cluster_test.go new file mode 100644 index 000000000..97f7c04ee --- /dev/null +++ b/bundle/tests/override_job_cluster_test.go @@ -0,0 +1,29 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOverrideJobClusterDev(t *testing.T) { + b := loadEnvironment(t, "./override_job_cluster", "development") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 1, 
c.NewCluster.NumWorkers) +} + +func TestOverrideJobClusterStaging(t *testing.T) { + b := loadEnvironment(t, "./override_job_cluster", "staging") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 4, c.NewCluster.NumWorkers) +} From 5b819cd982182542476fe249a4c0002e96bd6dd3 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Mon, 14 Aug 2023 14:45:08 +0200 Subject: [PATCH 056/139] Always resolve .databrickscfg file (#659) ## Changes #629 introduced a change to autopopulate the host from .databrickscfg if the user is logging back into a host they were previously using. This did not respect the DATABRICKS_CONFIG_FILE env variable, causing the flow to stop working for users with no .databrickscfg file in their home directory. This PR refactors all config file loading to go through one interface, `databrickscfg.GetDatabricksCfg()`, and an auxiliary `databrickscfg.GetDatabricksCfgPath()` to get the configured file path. Closes #655. 
## Tests ``` $ databricks auth login --profile abc Error: open /Users/miles/.databrickscfg: no such file or directory $ ./cli auth login --profile abc Error: cannot load Databricks config file: open /Users/miles/.databrickscfg: no such file or directory $ DATABRICKS_CONFIG_FILE=~/.databrickscfg.bak ./cli auth login --profile abc Databricks Host: https://asdf ``` --- cmd/auth/env.go | 5 +++-- cmd/auth/login.go | 2 +- cmd/auth/profiles.go | 24 +++++--------------- cmd/root/auth.go | 19 +++++++++------- libs/databrickscfg/profiles.go | 34 ++++++++++++++++++++++++----- libs/databrickscfg/profiles_test.go | 9 +++++--- 6 files changed, 55 insertions(+), 38 deletions(-) diff --git a/cmd/auth/env.go b/cmd/auth/env.go index 7bf3fd91f..241d5f880 100644 --- a/cmd/auth/env.go +++ b/cmd/auth/env.go @@ -9,6 +9,7 @@ import ( "net/url" "strings" + "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go/config" "github.com/spf13/cobra" "gopkg.in/ini.v1" @@ -28,7 +29,7 @@ func canonicalHost(host string) (string, error) { var ErrNoMatchingProfiles = errors.New("no matching profiles found") -func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error) { +func resolveSection(cfg *config.Config, iniFile *config.File) (*ini.Section, error) { var candidates []*ini.Section configuredHost, err := canonicalHost(cfg.Host) if err != nil { @@ -68,7 +69,7 @@ func resolveSection(cfg *config.Config, iniFile *ini.File) (*ini.Section, error) } func loadFromDatabricksCfg(cfg *config.Config) error { - iniFile, err := getDatabricksCfg() + iniFile, err := databrickscfg.Get() if errors.Is(err, fs.ErrNotExist) { // it's fine not to have ~/.databrickscfg return nil diff --git a/cmd/auth/login.go b/cmd/auth/login.go index e248118ae..cf1d5c301 100644 --- a/cmd/auth/login.go +++ b/cmd/auth/login.go @@ -61,7 +61,7 @@ func newLoginCommand(persistentAuth *auth.PersistentAuth) *cobra.Command { } // If the chosen profile has a hostname and the user hasn't 
specified a host, infer the host from the profile. - _, profiles, err := databrickscfg.LoadProfiles(databrickscfg.DefaultPath, func(p databrickscfg.Profile) bool { + _, profiles, err := databrickscfg.LoadProfiles(func(p databrickscfg.Profile) bool { return p.Name == profileName }) if err != nil { diff --git a/cmd/auth/profiles.go b/cmd/auth/profiles.go index 2b08164f6..97d8eeabc 100644 --- a/cmd/auth/profiles.go +++ b/cmd/auth/profiles.go @@ -5,32 +5,16 @@ import ( "fmt" "net/http" "os" - "path/filepath" - "strings" "sync" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/config" "github.com/spf13/cobra" "gopkg.in/ini.v1" ) -func getDatabricksCfg() (*ini.File, error) { - configFile := os.Getenv("DATABRICKS_CONFIG_FILE") - if configFile == "" { - configFile = "~/.databrickscfg" - } - if strings.HasPrefix(configFile, "~") { - homedir, err := os.UserHomeDir() - if err != nil { - return nil, fmt.Errorf("cannot find homedir: %w", err) - } - configFile = filepath.Join(homedir, configFile[1:]) - } - return ini.Load(configFile) -} - type profileMetadata struct { Name string `json:"name"` Host string `json:"host,omitempty"` @@ -111,10 +95,12 @@ func newProfilesCommand() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) error { var profiles []*profileMetadata - iniFile, err := getDatabricksCfg() + iniFile, err := databrickscfg.Get() if os.IsNotExist(err) { // return empty list for non-configured machines - iniFile = ini.Empty() + iniFile = &config.File{ + File: &ini.File{}, + } } else if err != nil { return fmt.Errorf("cannot parse config file: %w", err) } diff --git a/cmd/root/auth.go b/cmd/root/auth.go index c13f74637..2f32d260e 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -40,10 +40,7 @@ func MustAccountClient(cmd *cobra.Command, args []string) error { // 1. only admins will have account configured // 2. 
99% of admins will have access to just one account // hence, we don't need to create a special "DEFAULT_ACCOUNT" profile yet - _, profiles, err := databrickscfg.LoadProfiles( - databrickscfg.DefaultPath, - databrickscfg.MatchAccountProfiles, - ) + _, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles) if err != nil { return err } @@ -124,8 +121,11 @@ func transformLoadError(path string, err error) error { } func askForWorkspaceProfile() (string, error) { - path := databrickscfg.DefaultPath - file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchWorkspaceProfiles) + path, err := databrickscfg.GetPath() + if err != nil { + return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchWorkspaceProfiles) if err != nil { return "", transformLoadError(path, err) } @@ -156,8 +156,11 @@ func askForWorkspaceProfile() (string, error) { } func askForAccountProfile() (string, error) { - path := databrickscfg.DefaultPath - file, profiles, err := databrickscfg.LoadProfiles(path, databrickscfg.MatchAccountProfiles) + path, err := databrickscfg.GetPath() + if err != nil { + return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + file, profiles, err := databrickscfg.LoadProfiles(databrickscfg.MatchAccountProfiles) if err != nil { return "", transformLoadError(path, err) } diff --git a/libs/databrickscfg/profiles.go b/libs/databrickscfg/profiles.go index 7892bddd1..864000d03 100644 --- a/libs/databrickscfg/profiles.go +++ b/libs/databrickscfg/profiles.go @@ -1,7 +1,9 @@ package databrickscfg import ( + "fmt" "os" + "path/filepath" "strings" "github.com/databricks/databricks-sdk-go/config" @@ -64,12 +66,34 @@ func MatchAllProfiles(p Profile) bool { return true } -const DefaultPath = "~/.databrickscfg" +// Get the path to the .databrickscfg file, falling back to the default in the current user's home directory. 
+func GetPath() (string, error) { + configFile := os.Getenv("DATABRICKS_CONFIG_FILE") + if configFile == "" { + configFile = "~/.databrickscfg" + } + if strings.HasPrefix(configFile, "~") { + homedir, err := os.UserHomeDir() + if err != nil { + return "", fmt.Errorf("cannot find homedir: %w", err) + } + configFile = filepath.Join(homedir, configFile[1:]) + } + return configFile, nil +} -func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles Profiles, err error) { - f, err := config.LoadFile(path) +func Get() (*config.File, error) { + configFile, err := GetPath() if err != nil { - return + return nil, fmt.Errorf("cannot determine Databricks config file path: %w", err) + } + return config.LoadFile(configFile) +} + +func LoadProfiles(fn ProfileMatchFunction) (file string, profiles Profiles, err error) { + f, err := Get() + if err != nil { + return "", nil, fmt.Errorf("cannot load Databricks config file: %w", err) } homedir, err := os.UserHomeDir() @@ -106,7 +130,7 @@ func LoadProfiles(path string, fn ProfileMatchFunction) (file string, profiles P } func ProfileCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { - _, profiles, err := LoadProfiles(DefaultPath, MatchAllProfiles) + _, profiles, err := LoadProfiles(MatchAllProfiles) if err != nil { return nil, cobra.ShellCompDirectiveError } diff --git a/libs/databrickscfg/profiles_test.go b/libs/databrickscfg/profiles_test.go index 582c6658e..b1acdce92 100644 --- a/libs/databrickscfg/profiles_test.go +++ b/libs/databrickscfg/profiles_test.go @@ -32,19 +32,22 @@ func TestLoadProfilesReturnsHomedirAsTilde(t *testing.T) { } else { t.Setenv("HOME", "./testdata") } - file, _, err := LoadProfiles("./testdata/databrickscfg", func(p Profile) bool { return true }) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + file, _, err := LoadProfiles(func(p Profile) bool { return true }) require.NoError(t, err) assert.Equal(t, "~/databrickscfg", 
file) } func TestLoadProfilesMatchWorkspace(t *testing.T) { - _, profiles, err := LoadProfiles("./testdata/databrickscfg", MatchWorkspaceProfiles) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + _, profiles, err := LoadProfiles(MatchWorkspaceProfiles) require.NoError(t, err) assert.Equal(t, []string{"DEFAULT", "query", "foo1", "foo2"}, profiles.Names()) } func TestLoadProfilesMatchAccount(t *testing.T) { - _, profiles, err := LoadProfiles("./testdata/databrickscfg", MatchAccountProfiles) + t.Setenv("DATABRICKS_CONFIG_FILE", "./testdata/databrickscfg") + _, profiles, err := LoadProfiles(MatchAccountProfiles) require.NoError(t, err) assert.Equal(t, []string{"acc"}, profiles.Names()) } From 8dc69365817cdc9b21b0e938cd73e8d9cd5a3f7f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 15 Aug 2023 11:58:54 +0200 Subject: [PATCH 057/139] Merge artifacts and resources block with overrides enabled (#660) ## Changes Originally, these blocks were merged with overrides. This was (inadvertently) disabled in #94. This change re-enables merging these blocks with overrides, such that any field set in an environment override always takes precedence over the field set in the base definition. ## Tests New unit test passes. --- bundle/config/root.go | 10 +++++----- .../resources/databricks.yml | 20 +++++++++++++++++++ .../{ => workspace}/databricks.yml | 0 bundle/tests/environment_overrides_test.go | 18 +++++++++++++---- 4 files changed, 39 insertions(+), 9 deletions(-) create mode 100644 bundle/tests/environment_overrides/resources/databricks.yml rename bundle/tests/environment_overrides/{ => workspace}/databricks.yml (100%) diff --git a/bundle/config/root.go b/bundle/config/root.go index 4ca9d0a05..b6d1efc96 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -166,7 +166,7 @@ func (r *Root) Merge(other *Root) error { } // TODO: define and test semantics for merging. 
- return mergo.MergeWithOverwrite(r, other) + return mergo.Merge(r, other, mergo.WithOverride) } func (r *Root) MergeEnvironment(env *Environment) error { @@ -178,28 +178,28 @@ func (r *Root) MergeEnvironment(env *Environment) error { } if env.Bundle != nil { - err = mergo.MergeWithOverwrite(&r.Bundle, env.Bundle) + err = mergo.Merge(&r.Bundle, env.Bundle, mergo.WithOverride) if err != nil { return err } } if env.Workspace != nil { - err = mergo.MergeWithOverwrite(&r.Workspace, env.Workspace) + err = mergo.Merge(&r.Workspace, env.Workspace, mergo.WithOverride) if err != nil { return err } } if env.Artifacts != nil { - err = mergo.Merge(&r.Artifacts, env.Artifacts, mergo.WithAppendSlice) + err = mergo.Merge(&r.Artifacts, env.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) if err != nil { return err } } if env.Resources != nil { - err = mergo.Merge(&r.Resources, env.Resources, mergo.WithAppendSlice) + err = mergo.Merge(&r.Resources, env.Resources, mergo.WithOverride, mergo.WithAppendSlice) if err != nil { return err } diff --git a/bundle/tests/environment_overrides/resources/databricks.yml b/bundle/tests/environment_overrides/resources/databricks.yml new file mode 100644 index 000000000..eef5dc010 --- /dev/null +++ b/bundle/tests/environment_overrides/resources/databricks.yml @@ -0,0 +1,20 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + job1: + name: "base job" + +environments: + development: + default: true + + staging: + resources: + jobs: + job1: + name: "staging job" diff --git a/bundle/tests/environment_overrides/databricks.yml b/bundle/tests/environment_overrides/workspace/databricks.yml similarity index 100% rename from bundle/tests/environment_overrides/databricks.yml rename to bundle/tests/environment_overrides/workspace/databricks.yml diff --git a/bundle/tests/environment_overrides_test.go b/bundle/tests/environment_overrides_test.go index 4b8401c86..b8cc224a9 100644 --- 
a/bundle/tests/environment_overrides_test.go +++ b/bundle/tests/environment_overrides_test.go @@ -6,12 +6,22 @@ import ( "github.com/stretchr/testify/assert" ) -func TestEnvironmentOverridesDev(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides", "development") +func TestEnvironmentOverridesWorkspaceDev(t *testing.T) { + b := loadEnvironment(t, "./environment_overrides/workspace", "development") assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host) } -func TestEnvironmentOverridesStaging(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides", "staging") +func TestEnvironmentOverridesWorkspaceStaging(t *testing.T) { + b := loadEnvironment(t, "./environment_overrides/workspace", "staging") assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host) } + +func TestEnvironmentOverridesResourcesDev(t *testing.T) { + b := loadEnvironment(t, "./environment_overrides/resources", "development") + assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) +} + +func TestEnvironmentOverridesResourcesStaging(t *testing.T) { + b := loadEnvironment(t, "./environment_overrides/resources", "staging") + assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) +} From 5cdaacacc37b60bbefd479284577ab60d224d45c Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 15 Aug 2023 15:39:32 +0200 Subject: [PATCH 058/139] Locked terraform binary version to <= 1.5.5 (#666) ## Changes Locked terraform binary version to <= 1.5.5 --- bundle/deploy/terraform/init.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index eb3e99d18..924c1f090 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -57,7 +57,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con // Download Terraform to private bin directory. 
installer := &releases.LatestVersion{ Product: product.Terraform, - Constraints: version.MustConstraints(version.NewConstraint("<2.0")), + Constraints: version.MustConstraints(version.NewConstraint("<=1.5.5")), InstallDir: binDir, } execPath, err = installer.Install(ctx) From 6e708da6fca43ba2c2c15e849a6e09e122afc687 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 15 Aug 2023 15:50:40 +0200 Subject: [PATCH 059/139] Upgraded Go version to 1.21 (#664) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes Upgraded Go version to 1.21 Upgraded to use `slices` and `slog` from core instead of experimental. Still use `exp/maps` as our code relies on `maps.Keys` which is not part of core package and therefore refactoring required. ### Tests Integration tests passed ``` [DEBUG] Test execution command: /opt/homebrew/opt/go@1.21/bin/go test ./... -json -timeout 1h -run ^TestAcc [DEBUG] Test execution directory: /Users/andrew.nester/cli 2023/08/15 13:20:51 [INFO] ✅ TestAccAlertsCreateErrWhenNoArguments (2.150s) 2023/08/15 13:20:52 [INFO] ✅ TestAccApiGet (0.580s) 2023/08/15 13:20:53 [INFO] ✅ TestAccClustersList (0.900s) 2023/08/15 13:20:54 [INFO] ✅ TestAccClustersGet (0.870s) 2023/08/15 13:21:06 [INFO] ✅ TestAccFilerWorkspaceFilesReadWrite (11.980s) 2023/08/15 13:21:13 [INFO] ✅ TestAccFilerWorkspaceFilesReadDir (7.060s) 2023/08/15 13:21:25 [INFO] ✅ TestAccFilerDbfsReadWrite (12.810s) 2023/08/15 13:21:33 [INFO] ✅ TestAccFilerDbfsReadDir (7.380s) 2023/08/15 13:21:41 [INFO] ✅ TestAccFilerWorkspaceNotebookConflict (7.760s) 2023/08/15 13:21:49 [INFO] ✅ TestAccFilerWorkspaceNotebookWithOverwriteFlag (8.660s) 2023/08/15 13:21:49 [INFO] ✅ TestAccFilerLocalReadWrite (0.020s) 2023/08/15 13:21:49 [INFO] ✅ TestAccFilerLocalReadDir (0.010s) 2023/08/15 13:21:52 [INFO] ✅ TestAccFsCatForDbfs (3.190s) 2023/08/15 13:21:53 [INFO] ✅ TestAccFsCatForDbfsOnNonExistentFile (0.890s) 2023/08/15 13:21:54 [INFO] ✅ TestAccFsCatForDbfsInvalidScheme 
(0.600s) 2023/08/15 13:21:57 [INFO] ✅ TestAccFsCatDoesNotSupportOutputModeJson (2.960s) 2023/08/15 13:22:28 [INFO] ✅ TestAccFsCpDir (31.480s) 2023/08/15 13:22:43 [INFO] ✅ TestAccFsCpFileToFile (14.530s) 2023/08/15 13:22:58 [INFO] ✅ TestAccFsCpFileToDir (14.610s) 2023/08/15 13:23:29 [INFO] ✅ TestAccFsCpDirToDirFileNotOverwritten (31.810s) 2023/08/15 13:23:47 [INFO] ✅ TestAccFsCpFileToDirFileNotOverwritten (17.500s) 2023/08/15 13:24:04 [INFO] ✅ TestAccFsCpFileToFileFileNotOverwritten (17.260s) 2023/08/15 13:24:37 [INFO] ✅ TestAccFsCpDirToDirWithOverwriteFlag (32.690s) 2023/08/15 13:24:56 [INFO] ✅ TestAccFsCpFileToFileWithOverwriteFlag (19.290s) 2023/08/15 13:25:15 [INFO] ✅ TestAccFsCpFileToDirWithOverwriteFlag (19.230s) 2023/08/15 13:25:17 [INFO] ✅ TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag (2.010s) 2023/08/15 13:25:18 [INFO] ✅ TestAccFsCpErrorsOnInvalidScheme (0.610s) 2023/08/15 13:25:33 [INFO] ✅ TestAccFsCpSourceIsDirectoryButTargetIsFile (14.900s) 2023/08/15 13:25:37 [INFO] ✅ TestAccFsLsForDbfs (3.770s) 2023/08/15 13:25:41 [INFO] ✅ TestAccFsLsForDbfsWithAbsolutePaths (4.160s) 2023/08/15 13:25:44 [INFO] ✅ TestAccFsLsForDbfsOnFile (2.990s) 2023/08/15 13:25:46 [INFO] ✅ TestAccFsLsForDbfsOnEmptyDir (1.870s) 2023/08/15 13:25:46 [INFO] ✅ TestAccFsLsForDbfsForNonexistingDir (0.850s) 2023/08/15 13:25:47 [INFO] ✅ TestAccFsLsWithoutScheme (0.560s) 2023/08/15 13:25:49 [INFO] ✅ TestAccFsMkdirCreatesDirectory (2.310s) 2023/08/15 13:25:52 [INFO] ✅ TestAccFsMkdirCreatesMultipleDirectories (2.920s) 2023/08/15 13:25:55 [INFO] ✅ TestAccFsMkdirWhenDirectoryAlreadyExists (2.320s) 2023/08/15 13:25:57 [INFO] ✅ TestAccFsMkdirWhenFileExistsAtPath (2.820s) 2023/08/15 13:26:01 [INFO] ✅ TestAccFsRmForFile (4.030s) 2023/08/15 13:26:05 [INFO] ✅ TestAccFsRmForEmptyDirectory (3.530s) 2023/08/15 13:26:08 [INFO] ✅ TestAccFsRmForNonEmptyDirectory (3.190s) 2023/08/15 13:26:09 [INFO] ✅ TestAccFsRmForNonExistentFile (0.830s) 2023/08/15 13:26:13 [INFO] ✅ 
TestAccFsRmForNonEmptyDirectoryWithRecursiveFlag (3.580s) 2023/08/15 13:26:13 [INFO] ✅ TestAccGitClone (0.800s) 2023/08/15 13:26:14 [INFO] ✅ TestAccGitCloneWithOnlyRepoNameOnAlternateBranch (0.790s) 2023/08/15 13:26:15 [INFO] ✅ TestAccGitCloneErrorsWhenRepositoryDoesNotExist (0.540s) 2023/08/15 13:26:23 [INFO] ✅ TestAccLock (8.630s) 2023/08/15 13:26:27 [INFO] ✅ TestAccLockUnlockWithoutAllowsLockFileNotExist (3.490s) 2023/08/15 13:26:30 [INFO] ✅ TestAccLockUnlockWithAllowsLockFileNotExist (3.130s) 2023/08/15 13:26:39 [INFO] ✅ TestAccSyncFullFileSync (9.370s) 2023/08/15 13:26:50 [INFO] ✅ TestAccSyncIncrementalFileSync (10.390s) 2023/08/15 13:27:00 [INFO] ✅ TestAccSyncNestedFolderSync (10.680s) 2023/08/15 13:27:11 [INFO] ✅ TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory (10.970s) 2023/08/15 13:27:22 [INFO] ✅ TestAccSyncNestedSpacePlusAndHashAreEscapedSync (10.930s) 2023/08/15 13:27:29 [INFO] ✅ TestAccSyncIncrementalFileOverwritesFolder (7.020s) 2023/08/15 13:27:37 [INFO] ✅ TestAccSyncIncrementalSyncPythonNotebookToFile (7.380s) 2023/08/15 13:27:43 [INFO] ✅ TestAccSyncIncrementalSyncFileToPythonNotebook (6.050s) 2023/08/15 13:27:48 [INFO] ✅ TestAccSyncIncrementalSyncPythonNotebookDelete (5.390s) 2023/08/15 13:27:51 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist (2.570s) 2023/08/15 13:27:56 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableIfRepoExists (5.540s) 2023/08/15 13:27:58 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableInWorkspace (1.840s) 2023/08/15 13:27:59 [INFO] ✅ TestAccWorkspaceList (0.790s) 2023/08/15 13:28:08 [INFO] ✅ TestAccExportDir (8.860s) 2023/08/15 13:28:11 [INFO] ✅ TestAccExportDirDoesNotOverwrite (3.090s) 2023/08/15 13:28:14 [INFO] ✅ TestAccExportDirWithOverwriteFlag (3.500s) 2023/08/15 13:28:23 [INFO] ✅ TestAccImportDir (8.330s) 2023/08/15 13:28:34 [INFO] ✅ TestAccImportDirDoesNotOverwrite (10.970s) 2023/08/15 13:28:44 [INFO] ✅ TestAccImportDirWithOverwriteFlag (10.130s) 2023/08/15 13:28:44 [INFO] ✅ 68/68 passed, 0 failed, 3 skipped 
``` --- .github/workflows/push.yml | 6 ++---- .github/workflows/release-snapshot.yml | 2 +- .github/workflows/release.yml | 2 +- bundle/artifacts/all.go | 3 ++- bundle/config/interpolation/interpolation.go | 3 ++- bundle/config/interpolation/lookup.go | 3 +-- bundle/config/mutator/process_root_includes.go | 2 +- bundle/internal/tf/codegen/generator/util.go | 3 ++- bundle/internal/tf/codegen/generator/walker.go | 3 ++- cmd/root/logger.go | 11 ++++++----- cmd/root/root.go | 3 ++- go.mod | 2 +- go.sum | 15 +++++++++++++++ internal/acc/helpers.go | 2 -- internal/helpers.go | 1 - libs/auth/oauth.go | 3 +-- libs/cmdio/io.go | 6 +++--- libs/filer/dbfs_client.go | 2 +- libs/filer/files_client.go | 2 +- libs/filer/local_client.go | 3 +-- libs/filer/slice.go | 2 +- libs/filer/workspace_files_client.go | 2 +- libs/flags/log_level_flag.go | 2 +- libs/locker/locker.go | 2 +- libs/log/context.go | 2 +- libs/log/levels.go | 2 +- libs/log/logger.go | 2 +- libs/log/replace_attr.go | 2 +- libs/log/replace_attr_test.go | 2 +- libs/log/sdk.go | 3 ++- libs/log/source.go | 3 +-- libs/log/source_test.go | 2 +- libs/template/renderer.go | 2 +- libs/template/validators.go | 2 +- 34 files changed, 60 insertions(+), 47 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index c47cfc726..6f14fe881 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -28,7 +28,7 @@ jobs: - name: Setup Go uses: actions/setup-go@v3 with: - go-version: 1.19.5 + go-version: 1.21.0 cache: true - name: Set go env @@ -56,9 +56,7 @@ jobs: - name: Setup Go uses: actions/setup-go@v3 with: - # Use 1.19 because of godoc formatting. - # See https://tip.golang.org/doc/go1.19#go-doc. - go-version: 1.19 + go-version: 1.21 # No need to download cached dependencies when running gofmt. 
cache: false diff --git a/.github/workflows/release-snapshot.yml b/.github/workflows/release-snapshot.yml index 3acb6b5a5..130d49dd0 100644 --- a/.github/workflows/release-snapshot.yml +++ b/.github/workflows/release-snapshot.yml @@ -22,7 +22,7 @@ jobs: id: go uses: actions/setup-go@v3 with: - go-version: 1.19.5 + go-version: 1.21.0 - name: Locate cache paths id: cache diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c1ecef011..5992dcb46 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ jobs: id: go uses: actions/setup-go@v3 with: - go-version: 1.19.5 + go-version: 1.21.0 - name: Locate cache paths id: cache diff --git a/bundle/artifacts/all.go b/bundle/artifacts/all.go index b6a3f7dc6..1a1661e5f 100644 --- a/bundle/artifacts/all.go +++ b/bundle/artifacts/all.go @@ -4,9 +4,10 @@ import ( "context" "fmt" + "slices" + "github.com/databricks/cli/bundle" "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) // all is an internal proxy for producing a list of mutators for all artifacts. diff --git a/bundle/config/interpolation/interpolation.go b/bundle/config/interpolation/interpolation.go index bf19804a0..bf5bd169e 100644 --- a/bundle/config/interpolation/interpolation.go +++ b/bundle/config/interpolation/interpolation.go @@ -9,10 +9,11 @@ import ( "sort" "strings" + "slices" + "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) const Delimiter = "." diff --git a/bundle/config/interpolation/lookup.go b/bundle/config/interpolation/lookup.go index 932d739e2..3dc5047a7 100644 --- a/bundle/config/interpolation/lookup.go +++ b/bundle/config/interpolation/lookup.go @@ -3,9 +3,8 @@ package interpolation import ( "errors" "fmt" + "slices" "strings" - - "golang.org/x/exp/slices" ) // LookupFunction returns the value to rewrite a path expression to. 
diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index c2dffc6ee..989928721 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -5,11 +5,11 @@ import ( "fmt" "os" "path/filepath" + "slices" "strings" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "golang.org/x/exp/slices" ) // Get extra include paths from environment variable diff --git a/bundle/internal/tf/codegen/generator/util.go b/bundle/internal/tf/codegen/generator/util.go index 890417d81..6e703a703 100644 --- a/bundle/internal/tf/codegen/generator/util.go +++ b/bundle/internal/tf/codegen/generator/util.go @@ -1,8 +1,9 @@ package generator import ( + "slices" + "golang.org/x/exp/maps" - "golang.org/x/exp/slices" ) // sortKeys returns a sorted copy of the keys in the specified map. diff --git a/bundle/internal/tf/codegen/generator/walker.go b/bundle/internal/tf/codegen/generator/walker.go index 9532e0e4d..2ed044c3d 100644 --- a/bundle/internal/tf/codegen/generator/walker.go +++ b/bundle/internal/tf/codegen/generator/walker.go @@ -4,10 +4,11 @@ import ( "fmt" "strings" + "slices" + tfjson "github.com/hashicorp/terraform-json" "github.com/iancoleman/strcase" "github.com/zclconf/go-cty/cty" - "golang.org/x/exp/slices" ) type field struct { diff --git a/cmd/root/logger.go b/cmd/root/logger.go index 87f695503..ddfae445a 100644 --- a/cmd/root/logger.go +++ b/cmd/root/logger.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "io" + "log/slog" "os" "github.com/databricks/cli/libs/cmdio" @@ -11,7 +12,6 @@ import ( "github.com/databricks/cli/libs/log" "github.com/fatih/color" "github.com/spf13/cobra" - "golang.org/x/exp/slog" ) const ( @@ -52,11 +52,12 @@ func (l *friendlyHandler) coloredLevel(rec slog.Record) string { func (l *friendlyHandler) Handle(ctx context.Context, rec slog.Record) error { t := fmt.Sprintf("%02d:%02d", rec.Time.Hour(), rec.Time.Minute()) attrs := "" 
- rec.Attrs(func(a slog.Attr) { + rec.Attrs(func(a slog.Attr) bool { attrs += fmt.Sprintf(" %s%s%s", color.CyanString(a.Key), color.CyanString("="), color.YellowString(a.Value.String())) + return true }) msg := fmt.Sprintf("%s %s %s%s\n", color.MagentaString(t), @@ -76,16 +77,16 @@ type logFlags struct { func (f *logFlags) makeLogHandler(opts slog.HandlerOptions) (slog.Handler, error) { switch f.output { case flags.OutputJSON: - return opts.NewJSONHandler(f.file.Writer()), nil + return slog.NewJSONHandler(f.file.Writer(), &opts), nil case flags.OutputText: w := f.file.Writer() if cmdio.IsTTY(w) { return &friendlyHandler{ - Handler: opts.NewTextHandler(w), + Handler: slog.NewTextHandler(w, &opts), w: w, }, nil } - return opts.NewTextHandler(w), nil + return slog.NewTextHandler(w, &opts), nil default: return nil, fmt.Errorf("invalid log output mode: %s", f.output) diff --git a/cmd/root/root.go b/cmd/root/root.go index 0a18594a1..48868b41f 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -6,11 +6,12 @@ import ( "os" "strings" + "log/slog" + "github.com/databricks/cli/internal/build" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" - "golang.org/x/exp/slog" ) func New() *cobra.Command { diff --git a/go.mod b/go.mod index c3efa91b2..9534a4c9d 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/databricks/cli -go 1.18 +go 1.21 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 diff --git a/go.sum b/go.sum index 1edb3b48d..b8c90e5e6 100644 --- a/go.sum +++ b/go.sum @@ -6,9 +6,11 @@ cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGB cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VMrpA= +github.com/Microsoft/go-winio v0.5.2/go.mod 
h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 h1:wPbRQzjjwFc0ih8puEVAOFGELsn1zoIIYdxvML7mDxA= github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8/go.mod h1:I0gYDMZ6Z5GRU7l58bNFSkPTFN6Yl12dsUlAZ8xy98g= github.com/acomagu/bufpipe v1.0.4 h1:e3H4WUzM3npvo5uv95QuJM3cQspFNtFBzvJ2oNjKIDQ= +github.com/acomagu/bufpipe v1.0.4/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ2sYmHc4= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= @@ -40,6 +42,7 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -52,8 +55,11 @@ github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBD github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= +github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= github.com/go-git/go-billy/v5 v5.4.1 
h1:Uwp5tDRkPr+l/TnbHOQzp+tmJfLceOlbVucgpTz8ix4= +github.com/go-git/go-billy/v5 v5.4.1/go.mod h1:vjbugF6Fz7JIflbVpl1hJsGjSHNltrSw45YK/ukIvQg= github.com/go-git/go-git/v5 v5.6.1 h1:q4ZRqQl4pR/ZJHc1L5CFjGA1a10u76aV1iC+nh+bHsk= +github.com/go-git/go-git/v5 v5.6.1/go.mod h1:mvyoL6Unz0PiTQrGQfSfiLFhBH1c1e84ylC2MDs4ee8= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= @@ -84,6 +90,7 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= +github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= @@ -94,6 +101,7 @@ github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvkiqTYKBCKLNmlge2eVjoZfySzM= github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w= github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas= +github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= 
github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= @@ -110,7 +118,9 @@ github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/manifoldco/promptui v0.9.0 h1:3V4HzJk1TtXW1MTZMP7mdlwbBpIinw3HztaIlYthEiA= github.com/manifoldco/promptui v0.9.0/go.mod h1:ka04sppxSGFAtxX0qhlYQjISsg9mR4GWtQEhdbn6Pgg= github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= @@ -124,6 +134,7 @@ github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D github.com/nwidger/jsoncolor v0.3.2 h1:rVJJlwAWDJShnbTYOQ5RM7yTA20INyKXlJ/fg4JMhHQ= github.com/nwidger/jsoncolor v0.3.2/go.mod h1:Cs34umxLbJvgBMnVNVqhji9BhoT/N/KinHqZptQ7cf4= github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU= github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= @@ -134,7 +145,9 @@ github.com/russross/blackfriday/v2 v2.1.0/go.mod 
h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 h1:OkMGxebDjyw0ULyrTYWeN0UNCCkmCWfjPnIA2W6oviI= github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06/go.mod h1:+ePHsJ1keEjQtpvf9HHw0f4ZeJ0TLRsxhunSI2hYJSs= github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ= +github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/skeema/knownhosts v1.1.0 h1:Wvr9V0MxhjRbl3f9nMnKnFfiWTJmtECJ9Njkea3ysW0= +github.com/skeema/knownhosts v1.1.0/go.mod h1:sKFq3RD6/TKZkSWn8boUbDC7Qkgcv+8XXijpFO6roag= github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I= github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= @@ -153,6 +166,7 @@ github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXl github.com/whilp/git-urls v1.0.0 h1:95f6UMWN5FKW71ECsXRUd3FVYiXdrE7aX4NZKcPmIjU= github.com/whilp/git-urls v1.0.0/go.mod h1:J16SAmobsqc3Qcy98brfl5f5+e0clUvg1krgwk/qCfE= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= @@ -283,6 +297,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod 
h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= diff --git a/internal/acc/helpers.go b/internal/acc/helpers.go index aa9902745..f98001346 100644 --- a/internal/acc/helpers.go +++ b/internal/acc/helpers.go @@ -6,7 +6,6 @@ import ( "os" "strings" "testing" - "time" ) // GetEnvOrSkipTest proceeds with test only with that env variable. @@ -22,7 +21,6 @@ const charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" // RandomName gives random name with optional prefix. e.g. qa.RandomName("tf-") func RandomName(prefix ...string) string { - rand.Seed(time.Now().UnixNano()) randLen := 12 b := make([]byte, randLen) for i := range b { diff --git a/internal/helpers.go b/internal/helpers.go index 194f0eee4..ddc005173 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -37,7 +37,6 @@ func GetEnvOrSkipTest(t *testing.T, name string) string { // RandomName gives random name with optional prefix. e.g. 
qa.RandomName("tf-") func RandomName(prefix ...string) string { - rand.Seed(time.Now().UnixNano()) randLen := 12 b := make([]byte, randLen) for i := range b { diff --git a/libs/auth/oauth.go b/libs/auth/oauth.go index b7e0ce2f0..dd27d04b2 100644 --- a/libs/auth/oauth.go +++ b/libs/auth/oauth.go @@ -2,6 +2,7 @@ package auth import ( "context" + "crypto/rand" "crypto/sha256" _ "embed" "encoding/base64" @@ -9,7 +10,6 @@ import ( "errors" "fmt" "io" - "math/rand" "net" "net/http" "strings" @@ -255,7 +255,6 @@ func (a *PersistentAuth) stateAndPKCE() (string, *authhandler.PKCEParams) { } func (a *PersistentAuth) randomString(size int) string { - rand.Seed(time.Now().UnixNano()) raw := make([]byte, size) _, _ = rand.Read(raw) return base64.RawURLEncoding.EncodeToString(raw) diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index a60231c09..bc5a5f302 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -5,6 +5,7 @@ import ( "fmt" "io" "os" + "slices" "strings" "time" @@ -12,7 +13,6 @@ import ( "github.com/databricks/cli/libs/flags" "github.com/manifoldco/promptui" "github.com/mattn/go-isatty" - "golang.org/x/exp/slices" ) // cmdIO is the private instance, that is not supposed to be accessed @@ -140,8 +140,8 @@ func (c *cmdIO) Select(names map[string]string, label string) (id string, err er for k, v := range names { items = append(items, tuple{k, v}) } - slices.SortFunc(items, func(a, b tuple) bool { - return a.Name < b.Name + slices.SortFunc(items, func(a, b tuple) int { + return strings.Compare(a.Name, b.Name) }) idx, _, err := (&promptui.Select{ Label: label, diff --git a/libs/filer/dbfs_client.go b/libs/filer/dbfs_client.go index 64eb4b77e..38e8f9f3f 100644 --- a/libs/filer/dbfs_client.go +++ b/libs/filer/dbfs_client.go @@ -7,6 +7,7 @@ import ( "io/fs" "net/http" "path" + "slices" "sort" "strings" "time" @@ -14,7 +15,6 @@ import ( "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" 
"github.com/databricks/databricks-sdk-go/service/files" - "golang.org/x/exp/slices" ) // Type that implements fs.DirEntry for DBFS. diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go index ee7587dcc..285338b66 100644 --- a/libs/filer/files_client.go +++ b/libs/filer/files_client.go @@ -10,13 +10,13 @@ import ( "net/http" "net/url" "path" + "slices" "strings" "time" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/client" - "golang.org/x/exp/slices" ) // Type that implements fs.FileInfo for the Files API. diff --git a/libs/filer/local_client.go b/libs/filer/local_client.go index 8d960c84b..958b6277d 100644 --- a/libs/filer/local_client.go +++ b/libs/filer/local_client.go @@ -6,8 +6,7 @@ import ( "io/fs" "os" "path/filepath" - - "golang.org/x/exp/slices" + "slices" ) // LocalClient implements the [Filer] interface for the local filesystem. diff --git a/libs/filer/slice.go b/libs/filer/slice.go index c35d6e78a..077bb305f 100644 --- a/libs/filer/slice.go +++ b/libs/filer/slice.go @@ -1,6 +1,6 @@ package filer -import "golang.org/x/exp/slices" +import "slices" // sliceWithout returns a copy of the specified slice without element e, if it is present. func sliceWithout[S []E, E comparable](s S, e E) S { diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go index db06f91c2..ed4ad7a2b 100644 --- a/libs/filer/workspace_files_client.go +++ b/libs/filer/workspace_files_client.go @@ -11,6 +11,7 @@ import ( "net/url" "path" "regexp" + "slices" "sort" "strings" "time" @@ -19,7 +20,6 @@ import ( "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/client" "github.com/databricks/databricks-sdk-go/service/workspace" - "golang.org/x/exp/slices" ) // Type that implements fs.DirEntry for WSFS. 
diff --git a/libs/flags/log_level_flag.go b/libs/flags/log_level_flag.go index f5d305a50..836d84b70 100644 --- a/libs/flags/log_level_flag.go +++ b/libs/flags/log_level_flag.go @@ -2,12 +2,12 @@ package flags import ( "fmt" + "log/slog" "strings" "github.com/databricks/cli/libs/log" "github.com/spf13/cobra" "golang.org/x/exp/maps" - "golang.org/x/exp/slog" ) var levels = map[string]slog.Level{ diff --git a/libs/locker/locker.go b/libs/locker/locker.go index bb95b7840..66993156d 100644 --- a/libs/locker/locker.go +++ b/libs/locker/locker.go @@ -8,12 +8,12 @@ import ( "fmt" "io" "io/fs" + "slices" "time" "github.com/databricks/cli/libs/filer" "github.com/databricks/databricks-sdk-go" "github.com/google/uuid" - "golang.org/x/exp/slices" ) type UnlockOption int diff --git a/libs/log/context.go b/libs/log/context.go index 7ed1d292e..d9e31d116 100644 --- a/libs/log/context.go +++ b/libs/log/context.go @@ -3,7 +3,7 @@ package log import ( "context" - "golang.org/x/exp/slog" + "log/slog" ) type logger int diff --git a/libs/log/levels.go b/libs/log/levels.go index f6277cf3f..cdb5a1e1e 100644 --- a/libs/log/levels.go +++ b/libs/log/levels.go @@ -1,6 +1,6 @@ package log -import "golang.org/x/exp/slog" +import "log/slog" const ( LevelTrace slog.Level = -8 diff --git a/libs/log/logger.go b/libs/log/logger.go index 80d8782cf..43a30e92b 100644 --- a/libs/log/logger.go +++ b/libs/log/logger.go @@ -6,7 +6,7 @@ import ( "runtime" "time" - "golang.org/x/exp/slog" + "log/slog" ) // GetLogger returns either the logger configured on the context, diff --git a/libs/log/replace_attr.go b/libs/log/replace_attr.go index 55d2c15ff..b71e47d23 100644 --- a/libs/log/replace_attr.go +++ b/libs/log/replace_attr.go @@ -1,6 +1,6 @@ package log -import "golang.org/x/exp/slog" +import "log/slog" type ReplaceAttrFunction func(groups []string, a slog.Attr) slog.Attr diff --git a/libs/log/replace_attr_test.go b/libs/log/replace_attr_test.go index dce11be18..afedeaa61 100644 --- 
a/libs/log/replace_attr_test.go +++ b/libs/log/replace_attr_test.go @@ -1,10 +1,10 @@ package log import ( + "log/slog" "testing" "github.com/stretchr/testify/assert" - "golang.org/x/exp/slog" ) func testReplaceA(groups []string, a slog.Attr) slog.Attr { diff --git a/libs/log/sdk.go b/libs/log/sdk.go index 975f83aa7..e1b1ffed4 100644 --- a/libs/log/sdk.go +++ b/libs/log/sdk.go @@ -6,8 +6,9 @@ import ( "runtime" "time" + "log/slog" + sdk "github.com/databricks/databricks-sdk-go/logger" - "golang.org/x/exp/slog" ) // slogAdapter makes an slog.Logger usable with the Databricks SDK. diff --git a/libs/log/source.go b/libs/log/source.go index 4a30aaab3..d0fd30dc5 100644 --- a/libs/log/source.go +++ b/libs/log/source.go @@ -1,9 +1,8 @@ package log import ( + "log/slog" "path/filepath" - - "golang.org/x/exp/slog" ) // ReplaceSourceAttr rewrites the source attribute to include only the file's basename. diff --git a/libs/log/source_test.go b/libs/log/source_test.go index 010aad5ab..5c587af66 100644 --- a/libs/log/source_test.go +++ b/libs/log/source_test.go @@ -1,10 +1,10 @@ package log import ( + "log/slog" "testing" "github.com/stretchr/testify/assert" - "golang.org/x/exp/slog" ) func TestReplaceSourceAttrSourceKey(t *testing.T) { diff --git a/libs/template/renderer.go b/libs/template/renderer.go index 76479c05d..9be1b58ec 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -8,13 +8,13 @@ import ( "os" "path" "path/filepath" + "slices" "strings" "text/template" "github.com/databricks/cli/libs/filer" "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go/logger" - "golang.org/x/exp/slices" ) const templateExtension = ".tmpl" diff --git a/libs/template/validators.go b/libs/template/validators.go index 57eda0935..209700b63 100644 --- a/libs/template/validators.go +++ b/libs/template/validators.go @@ -3,9 +3,9 @@ package template import ( "fmt" "reflect" + "slices" "github.com/databricks/cli/libs/jsonschema" - 
"golang.org/x/exp/slices" ) type validator func(v any) error From 878bb6deaed425d5d5c47b7330f47209b2ca71ca Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 15 Aug 2023 16:28:04 +0200 Subject: [PATCH 060/139] Return better error messages for invalid JSON schema types in templates (#661) ## Changes Adds a function to validate json schema types added by the author. The default json unmarshaller does not validate that the parsed type matches the enum defined in `jsonschema.Type` Includes some other improvements to provide better error messages. This PR was prompted by usability difficulties reported by @mingyu89 during mlops stack migration. ## Tests Unit tests --- libs/jsonschema/schema.go | 37 ++++++++++++++++++++++++++++ libs/jsonschema/schema_test.go | 44 ++++++++++++++++++++++++++++++++++ libs/template/config.go | 15 ++++++++---- libs/template/config_test.go | 31 ++++++++++++++++++++++++ libs/template/utils.go | 8 +++++-- libs/template/utils_test.go | 6 +++++ 6 files changed, 135 insertions(+), 6 deletions(-) create mode 100644 libs/jsonschema/schema_test.go diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index 49e31bb74..c0d1736c1 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -1,5 +1,11 @@ package jsonschema +import ( + "encoding/json" + "fmt" + "os" +) + // defines schema for a json object type Schema struct { // Type of the object @@ -47,3 +53,34 @@ const ( ArrayType Type = "array" IntegerType Type = "integer" ) + +func (schema *Schema) validate() error { + for _, v := range schema.Properties { + switch v.Type { + case NumberType, BooleanType, StringType, IntegerType: + continue + case "int", "int32", "int64": + return fmt.Errorf("type %s is not a recognized json schema type. Please use \"integer\" instead", v.Type) + case "float", "float32", "float64": + return fmt.Errorf("type %s is not a recognized json schema type. 
Please use \"number\" instead", v.Type) + case "bool": + return fmt.Errorf("type %s is not a recognized json schema type. Please use \"boolean\" instead", v.Type) + default: + return fmt.Errorf("type %s is not a recognized json schema type", v.Type) + } + } + return nil +} + +func Load(path string) (*Schema, error) { + b, err := os.ReadFile(path) + if err != nil { + return nil, err + } + schema := &Schema{} + err = json.Unmarshal(b, schema) + if err != nil { + return nil, err + } + return schema, schema.validate() +} diff --git a/libs/jsonschema/schema_test.go b/libs/jsonschema/schema_test.go new file mode 100644 index 000000000..76112492f --- /dev/null +++ b/libs/jsonschema/schema_test.go @@ -0,0 +1,44 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestJsonSchemaValidate(t *testing.T) { + var err error + toSchema := func(s string) *Schema { + return &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: Type(s), + }, + }, + } + } + + err = toSchema("string").validate() + assert.NoError(t, err) + + err = toSchema("boolean").validate() + assert.NoError(t, err) + + err = toSchema("number").validate() + assert.NoError(t, err) + + err = toSchema("integer").validate() + assert.NoError(t, err) + + err = toSchema("int").validate() + assert.EqualError(t, err, "type int is not a recognized json schema type. Please use \"integer\" instead") + + err = toSchema("float").validate() + assert.EqualError(t, err, "type float is not a recognized json schema type. Please use \"number\" instead") + + err = toSchema("bool").validate() + assert.EqualError(t, err, "type bool is not a recognized json schema type. 
Please use \"boolean\" instead") + + err = toSchema("foobar").validate() + assert.EqualError(t, err, "type foobar is not a recognized json schema type") +} diff --git a/libs/template/config.go b/libs/template/config.go index ee5fcbef8..173244b0b 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -18,13 +18,11 @@ type config struct { func newConfig(ctx context.Context, schemaPath string) (*config, error) { // Read config schema - schemaBytes, err := os.ReadFile(schemaPath) + schema, err := jsonschema.Load(schemaPath) if err != nil { return nil, err } - schema := &jsonschema.Schema{} - err = json.Unmarshal(schemaBytes, schema) - if err != nil { + if err := validateSchema(schema); err != nil { return nil, err } @@ -36,6 +34,15 @@ func newConfig(ctx context.Context, schemaPath string) (*config, error) { }, nil } +func validateSchema(schema *jsonschema.Schema) error { + for _, v := range schema.Properties { + if v.Type == jsonschema.ArrayType || v.Type == jsonschema.ObjectType { + return fmt.Errorf("property type %s is not supported by bundle templates", v.Type) + } + } + return nil +} + // Reads json file at path and assigns values from the file func (c *config) assignValuesFromFile(path string) error { // Read the config file diff --git a/libs/template/config_test.go b/libs/template/config_test.go index 7b8341ec4..335242467 100644 --- a/libs/template/config_test.go +++ b/libs/template/config_test.go @@ -161,3 +161,34 @@ func TestTemplateConfigValidateTypeForInvalidType(t *testing.T) { err = c.validate() assert.EqualError(t, err, `incorrect type for int_val. 
expected type integer, but value is "this-should-be-an-int"`) } + +func TestTemplateValidateSchema(t *testing.T) { + var err error + toSchema := func(s string) *jsonschema.Schema { + return &jsonschema.Schema{ + Properties: map[string]*jsonschema.Schema{ + "foo": { + Type: jsonschema.Type(s), + }, + }, + } + } + + err = validateSchema(toSchema("string")) + assert.NoError(t, err) + + err = validateSchema(toSchema("boolean")) + assert.NoError(t, err) + + err = validateSchema(toSchema("number")) + assert.NoError(t, err) + + err = validateSchema(toSchema("integer")) + assert.NoError(t, err) + + err = validateSchema(toSchema("object")) + assert.EqualError(t, err, "property type object is not supported by bundle templates") + + err = validateSchema(toSchema("array")) + assert.EqualError(t, err, "property type array is not supported by bundle templates") +} diff --git a/libs/template/utils.go b/libs/template/utils.go index bf11ed86f..ade6a5730 100644 --- a/libs/template/utils.go +++ b/libs/template/utils.go @@ -66,8 +66,10 @@ func toString(v any, T jsonschema.Type) (string, error) { return "", err } return strconv.FormatInt(intVal, 10), nil - default: + case jsonschema.ArrayType, jsonschema.ObjectType: return "", fmt.Errorf("cannot format object of type %s as a string. Value of object: %#v", T, v) + default: + return "", fmt.Errorf("unknown json schema type: %q", T) } } @@ -87,8 +89,10 @@ func fromString(s string, T jsonschema.Type) (any, error) { v, err = strconv.ParseFloat(s, 32) case jsonschema.IntegerType: v, err = strconv.ParseInt(s, 10, 64) - default: + case jsonschema.ArrayType, jsonschema.ObjectType: return "", fmt.Errorf("cannot parse string as object of type %s. 
Value of string: %q", T, s) + default: + return "", fmt.Errorf("unknown json schema type: %q", T) } // Return more readable error incase of a syntax error diff --git a/libs/template/utils_test.go b/libs/template/utils_test.go index 5fe702439..1e038aac6 100644 --- a/libs/template/utils_test.go +++ b/libs/template/utils_test.go @@ -80,6 +80,9 @@ func TestTemplateToString(t *testing.T) { _, err = toString("abc", jsonschema.IntegerType) assert.EqualError(t, err, "cannot convert \"abc\" to an integer") + + _, err = toString("abc", "foobar") + assert.EqualError(t, err, "unknown json schema type: \"foobar\"") } func TestTemplateFromString(t *testing.T) { @@ -112,4 +115,7 @@ func TestTemplateFromString(t *testing.T) { _, err = fromString("1.0", jsonschema.IntegerType) assert.EqualError(t, err, "could not parse \"1.0\" as a integer: strconv.ParseInt: parsing \"1.0\": invalid syntax") + + _, err = fromString("1.0", "foobar") + assert.EqualError(t, err, "unknown json schema type: \"foobar\"") } From 61b103318fabfce36bdc5271452afa3c9a94e89c Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 15 Aug 2023 16:50:20 +0200 Subject: [PATCH 061/139] Use custom prompter for bundle template inputs (#663) ## Changes Prompt UI glitches often. We are switching to a custom implementation of a simple prompter which is much more stable. This also allows new lines in prompts which has been an ask by the mlflow team. 
## Tests Tested manually --- bundle/deploy/files/delete.go | 2 +- bundle/deploy/terraform/destroy.go | 2 +- libs/cmdio/logger.go | 54 ++++++++++++++++++++++++------ libs/cmdio/logger_test.go | 2 +- libs/template/config.go | 12 +++---- 5 files changed, 50 insertions(+), 22 deletions(-) diff --git a/bundle/deploy/files/delete.go b/bundle/deploy/files/delete.go index 990eca47a..9f7ad4d41 100644 --- a/bundle/deploy/files/delete.go +++ b/bundle/deploy/files/delete.go @@ -27,7 +27,7 @@ func (m *delete) Apply(ctx context.Context, b *bundle.Bundle) error { red := color.New(color.FgRed).SprintFunc() if !b.AutoApprove { - proceed, err := cmdio.Ask(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) + proceed, err := cmdio.AskYesOrNo(ctx, fmt.Sprintf("\n%s and all files in it will be %s Proceed?", b.Config.Workspace.RootPath, red("deleted permanently!"))) if err != nil { return err } diff --git a/bundle/deploy/terraform/destroy.go b/bundle/deploy/terraform/destroy.go index 649542f6f..0b3baba3b 100644 --- a/bundle/deploy/terraform/destroy.go +++ b/bundle/deploy/terraform/destroy.go @@ -89,7 +89,7 @@ func (w *destroy) Apply(ctx context.Context, b *bundle.Bundle) error { // Ask for confirmation, if needed if !b.Plan.ConfirmApply { red := color.New(color.FgRed).SprintFunc() - b.Plan.ConfirmApply, err = cmdio.Ask(ctx, fmt.Sprintf("\nThis will permanently %s resources! Proceed?", red("destroy"))) + b.Plan.ConfirmApply, err = cmdio.AskYesOrNo(ctx, fmt.Sprintf("\nThis will permanently %s resources! 
Proceed?", red("destroy"))) if err != nil { return err } diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index 3190a6a79..0663306e1 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -7,6 +7,7 @@ import ( "fmt" "io" "os" + "strings" "github.com/databricks/cli/libs/flags" ) @@ -74,33 +75,64 @@ func LogError(ctx context.Context, err error) { }) } -func Ask(ctx context.Context, question string) (bool, error) { +func Ask(ctx context.Context, question, defaultVal string) (string, error) { logger, ok := FromContext(ctx) if !ok { logger = Default() } - return logger.Ask(question) + return logger.Ask(question, defaultVal) } -func (l *Logger) Ask(question string) (bool, error) { - if l.Mode == flags.ModeJson { - return false, fmt.Errorf("question prompts are not supported in json mode") +func AskYesOrNo(ctx context.Context, question string) (bool, error) { + logger, ok := FromContext(ctx) + if !ok { + logger = Default() } // Add acceptable answers to the question prompt. - question += ` [y/n]:` - l.Writer.Write([]byte(question)) - ans, err := l.Reader.ReadString('\n') + question += ` [y/n]` + // Ask the question + ans, err := logger.Ask(question, "") if err != nil { return false, err } - if ans == "y\n" { + if ans == "y" { return true, nil - } else { - return false, nil } + return false, nil +} + +func (l *Logger) Ask(question string, defaultVal string) (string, error) { + if l.Mode == flags.ModeJson { + return "", fmt.Errorf("question prompts are not supported in json mode") + } + + // Add default value to question prompt. + if defaultVal != "" { + question += fmt.Sprintf(` [%s]`, defaultVal) + } + question += `: ` + + // print prompt + _, err := l.Writer.Write([]byte(question)) + if err != nil { + return "", err + } + + // read user input. 
Trim new line characters + ans, err := l.Reader.ReadString('\n') + if err != nil { + return "", err + } + ans = strings.Trim(ans, "\n\r") + + // Return default value if user just presses enter + if ans == "" { + return defaultVal, nil + } + return ans, nil } func (l *Logger) writeJson(event Event) { diff --git a/libs/cmdio/logger_test.go b/libs/cmdio/logger_test.go index ff715b11e..da6190462 100644 --- a/libs/cmdio/logger_test.go +++ b/libs/cmdio/logger_test.go @@ -9,6 +9,6 @@ import ( func TestAskFailedInJsonMode(t *testing.T) { l := NewLogger(flags.ModeJson) - _, err := l.Ask("What is your spirit animal?") + _, err := l.Ask("What is your spirit animal?", "") assert.ErrorContains(t, err, "question prompts are not supported in json mode") } diff --git a/libs/template/config.go b/libs/template/config.go index 173244b0b..302a13619 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -123,22 +123,18 @@ func (c *config) promptForValues() error { continue } - // Initialize Prompt dialog - var err error - prompt := cmdio.Prompt(c.ctx) - prompt.Label = property.Description - prompt.AllowEdit = true - // Compute default value to display by converting it to a string + var defaultVal string + var err error if property.Default != nil { - prompt.Default, err = toString(property.Default, property.Type) + defaultVal, err = toString(property.Default, property.Type) if err != nil { return err } } // Get user input by running the prompt - userInput, err := prompt.Run() + userInput, err := cmdio.Ask(c.ctx, property.Description, defaultVal) if err != nil { return err } From 6c644e159c290d992122e8d7dfc1760ffb1c41be Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 15 Aug 2023 18:07:22 +0200 Subject: [PATCH 062/139] Add map and pair helper functions for bundle templates (#665) ## Changes Go text templates allows only specifying one input argument for invocations of associated templates (ie `{{template ...}}`). 
This PR introduces the map and pair functions which allow template authors to work around this limitation by passing multiple arguments as key value pairs in a map. This PR is based on feedback from the mlops stacks migration where otherwise a bunch of duplicate code is required for computed values and fixtures. ## Tests Unit test --- libs/template/helpers.go | 24 +++++++++++++++++++ libs/template/helpers_test.go | 15 ++++++++++++ .../testdata/map-pair/library/abc.tmpl | 3 +++ .../testdata/map-pair/template/hello.tmpl | 1 + 4 files changed, 43 insertions(+) create mode 100644 libs/template/testdata/map-pair/library/abc.tmpl create mode 100644 libs/template/testdata/map-pair/template/hello.tmpl diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 94737c1eb..ac8466586 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -15,6 +15,11 @@ func (err ErrFail) Error() string { return err.msg } +type pair struct { + k string + v any +} + var helperFuncs = template.FuncMap{ "fail": func(format string, args ...any) (any, error) { return nil, ErrFail{fmt.Sprintf(format, args...)} @@ -27,4 +32,23 @@ var helperFuncs = template.FuncMap{ "regexp": func(expr string) (*regexp.Regexp, error) { return regexp.Compile(expr) }, + // A key value pair. This is used with the map function to generate maps + // to use inside a template + "pair": func(k string, v any) pair { + return pair{k, v} + }, + // map converts a list of pairs to a map object. This is useful to pass multiple + // objects to templates defined in the library directory. Go text template + // syntax for invoking a template only allows specifying a single argument, + // this function can be used to workaround that limitation. 
+ // + // For example: {{template "my_template" (map (pair "foo" $arg1) (pair "bar" $arg2))}} + // $arg1 and $arg2 can be referred from inside "my_template" as ".foo" and ".bar" + "map": func(pairs ...pair) map[string]any { + result := make(map[string]any, 0) + for _, p := range pairs { + result[p.k] = p.v + } + return result + }, } diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index 169e06f35..023eed297 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -54,3 +54,18 @@ func TestTemplateUrlFunction(t *testing.T) { assert.Len(t, r.files, 1) assert.Equal(t, "https://www.databricks.com", string(r.files[0].(*inMemoryFile).content)) } + +func TestTemplateMapPairFunction(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + r, err := newRenderer(ctx, nil, "./testdata/map-pair/template", "./testdata/map-pair/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Equal(t, "false 123 hello 12.3", string(r.files[0].(*inMemoryFile).content)) +} diff --git a/libs/template/testdata/map-pair/library/abc.tmpl b/libs/template/testdata/map-pair/library/abc.tmpl new file mode 100644 index 000000000..387c7555a --- /dev/null +++ b/libs/template/testdata/map-pair/library/abc.tmpl @@ -0,0 +1,3 @@ +{{- define "my_template" -}} +{{- .foo}} {{.bar}} {{.abc}} {{.def -}} +{{- end -}} diff --git a/libs/template/testdata/map-pair/template/hello.tmpl b/libs/template/testdata/map-pair/template/hello.tmpl new file mode 100644 index 000000000..d0077846e --- /dev/null +++ b/libs/template/testdata/map-pair/template/hello.tmpl @@ -0,0 +1 @@ +{{template "my_template" (map (pair "foo" false) (pair "bar" 123) (pair "abc" "hello") (pair "def" 12.3)) -}} From 6a843f28efb2fb12726d327a9856be696d471c5a Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 15 Aug 2023 21:03:43 +0200 Subject: [PATCH 
063/139] Correct name for force acquire deploy flag (#656) ## Changes As discussed here, the name for this flag should be `force-lock`: https://github.com/databricks/cli/pull/578#discussion_r1276233445 ## Tests Manually and existing tests --- cmd/bundle/deploy.go | 2 +- internal/locker_test.go | 2 +- libs/locker/locker.go | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/cmd/bundle/deploy.go b/cmd/bundle/deploy.go index 807bb982d..8818bbbf4 100644 --- a/cmd/bundle/deploy.go +++ b/cmd/bundle/deploy.go @@ -17,7 +17,7 @@ func newDeployCommand() *cobra.Command { var forceLock bool var computeID string cmd.Flags().BoolVar(&force, "force", false, "Force-override Git branch validation.") - cmd.Flags().BoolVar(&forceLock, "force-deploy", false, "Force acquisition of deployment lock.") + cmd.Flags().BoolVar(&forceLock, "force-lock", false, "Force acquisition of deployment lock.") cmd.Flags().StringVarP(&computeID, "compute-id", "c", "", "Override compute in the deployment with the given compute ID.") cmd.RunE = func(cmd *cobra.Command, args []string) error { diff --git a/internal/locker_test.go b/internal/locker_test.go index 2c7e7aa8e..661838ecc 100644 --- a/internal/locker_test.go +++ b/internal/locker_test.go @@ -90,7 +90,7 @@ func TestAccLock(t *testing.T) { indexOfAnInactiveLocker = i } assert.ErrorContains(t, lockerErrs[i], "lock acquired by") - assert.ErrorContains(t, lockerErrs[i], "Use --force to override") + assert.ErrorContains(t, lockerErrs[i], "Use --force-lock to override") } } assert.Equal(t, 1, countActive, "Exactly one locker should successfull acquire the lock") diff --git a/libs/locker/locker.go b/libs/locker/locker.go index 66993156d..b0d65c42e 100644 --- a/libs/locker/locker.go +++ b/libs/locker/locker.go @@ -105,10 +105,10 @@ func (locker *Locker) assertLockHeld(ctx context.Context) error { return err } if activeLockState.ID != locker.State.ID && !activeLockState.IsForced { - return fmt.Errorf("deploy lock acquired by %s at %v. 
Use --force to override", activeLockState.User, activeLockState.AcquisitionTime) + return fmt.Errorf("deploy lock acquired by %s at %v. Use --force-lock to override", activeLockState.User, activeLockState.AcquisitionTime) } if activeLockState.ID != locker.State.ID && activeLockState.IsForced { - return fmt.Errorf("deploy lock force acquired by %s at %v. Use --force to override", activeLockState.User, activeLockState.AcquisitionTime) + return fmt.Errorf("deploy lock force acquired by %s at %v. Use --force-lock to override", activeLockState.User, activeLockState.AcquisitionTime) } return nil } From d225d7a662b68ecc9840002bac4a1256b4bffdf1 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 16 Aug 2023 13:28:57 +0200 Subject: [PATCH 064/139] Confirm that override with a zero value doesn't work (#669) ## Changes This is not desirable and will be addressed by representing our configuration in a different structure (e.g. with cty, or with plain `any`), instead of Go structs. ## Tests Pass. --- .../resources/databricks.yml | 16 ++++++++++++++++ bundle/tests/environment_overrides_test.go | 11 +++++++++++ 2 files changed, 27 insertions(+) diff --git a/bundle/tests/environment_overrides/resources/databricks.yml b/bundle/tests/environment_overrides/resources/databricks.yml index eef5dc010..df261ba03 100644 --- a/bundle/tests/environment_overrides/resources/databricks.yml +++ b/bundle/tests/environment_overrides/resources/databricks.yml @@ -9,6 +9,13 @@ resources: job1: name: "base job" + pipelines: + boolean1: + photon: true + + boolean2: + photon: false + environments: development: default: true @@ -18,3 +25,12 @@ environments: jobs: job1: name: "staging job" + + pipelines: + boolean1: + # Note: setting a property to a zero value (in Go) does not have effect. + # See the corresponding test for details. 
+ photon: false + + boolean2: + photon: true diff --git a/bundle/tests/environment_overrides_test.go b/bundle/tests/environment_overrides_test.go index b8cc224a9..0a3f9fcd8 100644 --- a/bundle/tests/environment_overrides_test.go +++ b/bundle/tests/environment_overrides_test.go @@ -19,9 +19,20 @@ func TestEnvironmentOverridesWorkspaceStaging(t *testing.T) { func TestEnvironmentOverridesResourcesDev(t *testing.T) { b := loadEnvironment(t, "./environment_overrides/resources", "development") assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) + + // Base values are preserved in the development environment. + assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon) + assert.Equal(t, false, b.Config.Resources.Pipelines["boolean2"].Photon) } func TestEnvironmentOverridesResourcesStaging(t *testing.T) { b := loadEnvironment(t, "./environment_overrides/resources", "staging") assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) + + // Overrides are only applied if they are not zero-valued. + // This means that in its current form, we cannot override a true value with a false value. + // Note: this is not desirable and will be addressed by representing our configuration + // in a different structure (e.g. with cty), instead of Go structs. + assert.Equal(t, true, b.Config.Resources.Pipelines["boolean1"].Photon) + assert.Equal(t, true, b.Config.Resources.Pipelines["boolean2"].Photon) } From 35e8ed30c6207c239331c30491ecd2c34b1d123e Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 16 Aug 2023 14:56:12 +0200 Subject: [PATCH 065/139] Release v0.203.1 (#672) CLI: * Always resolve .databrickscfg file ([#659](https://github.com/databricks/cli/pull/659)). Bundles: * Add internal tag for bundle fields to be skipped from schema ([#636](https://github.com/databricks/cli/pull/636)). * Log the bundle root configuration file if applicable ([#657](https://github.com/databricks/cli/pull/657)). 
* Execute paths without the .tmpl extension as templates ([#654](https://github.com/databricks/cli/pull/654)). * Enable environment overrides for job clusters ([#658](https://github.com/databricks/cli/pull/658)). * Merge artifacts and resources block with overrides enabled ([#660](https://github.com/databricks/cli/pull/660)). * Locked terraform binary version to <= 1.5.5 ([#666](https://github.com/databricks/cli/pull/666)). * Return better error messages for invalid JSON schema types in templates ([#661](https://github.com/databricks/cli/pull/661)). * Use custom prompter for bundle template inputs ([#663](https://github.com/databricks/cli/pull/663)). * Add map and pair helper functions for bundle templates ([#665](https://github.com/databricks/cli/pull/665)). * Correct name for force acquire deploy flag ([#656](https://github.com/databricks/cli/pull/656)). * Confirm that override with a zero value doesn't work ([#669](https://github.com/databricks/cli/pull/669)). Internal: * Consolidate functions in libs/git ([#652](https://github.com/databricks/cli/pull/652)). * Upgraded Go version to 1.21 ([#664](https://github.com/databricks/cli/pull/664)). --- CHANGELOG.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cf7673b8..b0b6bc0ed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,27 @@ # Version changelog +## 0.203.1 + +CLI: + * Always resolve .databrickscfg file ([#659](https://github.com/databricks/cli/pull/659)). + +Bundles: + * Add internal tag for bundle fields to be skipped from schema ([#636](https://github.com/databricks/cli/pull/636)). + * Log the bundle root configuration file if applicable ([#657](https://github.com/databricks/cli/pull/657)). + * Execute paths without the .tmpl extension as templates ([#654](https://github.com/databricks/cli/pull/654)). + * Enable environment overrides for job clusters ([#658](https://github.com/databricks/cli/pull/658)). 
+ * Merge artifacts and resources block with overrides enabled ([#660](https://github.com/databricks/cli/pull/660)). + * Locked terraform binary version to <= 1.5.5 ([#666](https://github.com/databricks/cli/pull/666)). + * Return better error messages for invalid JSON schema types in templates ([#661](https://github.com/databricks/cli/pull/661)). + * Use custom prompter for bundle template inputs ([#663](https://github.com/databricks/cli/pull/663)). + * Add map and pair helper functions for bundle templates ([#665](https://github.com/databricks/cli/pull/665)). + * Correct name for force acquire deploy flag ([#656](https://github.com/databricks/cli/pull/656)). + * Confirm that override with a zero value doesn't work ([#669](https://github.com/databricks/cli/pull/669)). + +Internal: + * Consolidate functions in libs/git ([#652](https://github.com/databricks/cli/pull/652)). + * Upgraded Go version to 1.21 ([#664](https://github.com/databricks/cli/pull/664)). + ## 0.203.0 CLI: From 4694832534b2b94d94835b9b57629ab99f05b50c Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 17 Aug 2023 11:11:39 +0200 Subject: [PATCH 066/139] Do not try auto detect Python package if no Python wheel tasks defined (#674) ## Changes Fixes #673 It also includes a change for `libraries` from #635 to get the list of wheel tasks --- bundle/artifacts/whl/autodetect.go | 7 +++++ bundle/libraries/libraries.go | 50 +++++++++++++++++++++--------- 2 files changed, 43 insertions(+), 14 deletions(-) diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go index a801b48d7..41d80bb76 100644 --- a/bundle/artifacts/whl/autodetect.go +++ b/bundle/artifacts/whl/autodetect.go @@ -10,7 +10,9 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" ) type detectPkg struct { @@ -25,6 +27,11 @@ func (m *detectPkg) Name() string 
{ } func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { + wheelTasks := libraries.FindAllWheelTasks(b) + if len(wheelTasks) == 0 { + log.Infof(ctx, "No wheel tasks in databricks.yml config, skipping auto detect") + return nil + } cmdio.LogString(ctx, "artifacts.whl.AutoDetect: Detecting Python wheel project...") // checking if there is setup.py in the bundle root diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index 8ccf3fc7b..29848236c 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -25,26 +25,48 @@ func (a *match) Name() string { } func (a *match) Apply(ctx context.Context, b *bundle.Bundle) error { - r := b.Config.Resources - for k := range b.Config.Resources.Jobs { - tasks := r.Jobs[k].JobSettings.Tasks - for i := range tasks { - task := &tasks[i] - if isMissingRequiredLibraries(task) { - return fmt.Errorf("task '%s' is missing required libraries. Please include your package code in task libraries block", task.TaskKey) - } - for j := range task.Libraries { - lib := &task.Libraries[j] - err := findArtifactsAndMarkForUpload(ctx, lib, b) - if err != nil { - return err - } + tasks := findAllTasks(b) + for _, task := range tasks { + if isMissingRequiredLibraries(task) { + return fmt.Errorf("task '%s' is missing required libraries. 
Please include your package code in task libraries block", task.TaskKey) + } + for j := range task.Libraries { + lib := &task.Libraries[j] + err := findArtifactsAndMarkForUpload(ctx, lib, b) + if err != nil { + return err } } } return nil } +func findAllTasks(b *bundle.Bundle) []*jobs.Task { + r := b.Config.Resources + result := make([]*jobs.Task, 0) + for k := range b.Config.Resources.Jobs { + tasks := r.Jobs[k].JobSettings.Tasks + for i := range tasks { + task := &tasks[i] + result = append(result, task) + } + } + + return result +} + +func FindAllWheelTasks(b *bundle.Bundle) []*jobs.Task { + tasks := findAllTasks(b) + wheelTasks := make([]*jobs.Task, 0) + for _, task := range tasks { + if task.PythonWheelTask != nil { + wheelTasks = append(wheelTasks, task) + } + } + + return wheelTasks +} + func isMissingRequiredLibraries(task *jobs.Task) bool { if task.Libraries != nil { return false From 56dcd3f0a7398bfda2fb517886d6690e9f0018b5 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 17 Aug 2023 17:22:32 +0200 Subject: [PATCH 067/139] Renamed `environments` to `targets` in bundle configuration (#670) ## Changes Renamed Environments to Targets in bundle.yml. The change is backward-compatible and customers can continue to use `environments` in the time being. 
## Tests Added tests which checks that both `environments` and `targets` sections in bundle.yml works correctly --- bundle/bundle.go | 10 +-- bundle/bundle_test.go | 16 ++-- bundle/config/bundle.go | 9 +- bundle/config/mutator/default_environment.go | 37 -------- bundle/config/mutator/default_target.go | 37 ++++++++ ...ronment_test.go => default_target_test.go} | 16 ++-- .../config/mutator/default_workspace_root.go | 6 +- .../mutator/default_workspace_root_test.go | 4 +- bundle/config/mutator/mutator.go | 6 +- bundle/config/mutator/override_compute.go | 2 +- ...ronment_mode.go => process_target_mode.go} | 20 ++--- ...de_test.go => process_target_mode_test.go} | 22 ++--- .../mutator/select_default_environment.go | 54 ----------- .../select_default_environment_test.go | 90 ------------------- .../config/mutator/select_default_target.go | 54 +++++++++++ .../mutator/select_default_target_test.go | 90 +++++++++++++++++++ bundle/config/mutator/select_environment.go | 48 ---------- bundle/config/mutator/select_target.go | 54 +++++++++++ ...ironment_test.go => select_target_test.go} | 14 +-- bundle/config/resources.go | 2 +- bundle/config/resources/job.go | 2 +- bundle/config/root.go | 70 +++++++++------ bundle/config/root_test.go | 12 +-- bundle/config/{environment.go => target.go} | 12 +-- bundle/config/variable/variable.go | 2 +- bundle/config/workspace.go | 2 +- bundle/deploy/terraform/init_test.go | 16 ++-- bundle/deploy/terraform/load_test.go | 2 +- bundle/phases/initialize.go | 2 +- bundle/schema/README.md | 4 +- bundle/schema/docs.go | 16 ++-- bundle/schema/docs/bundle_descriptions.json | 6 +- bundle/tests/autoload_git/databricks.yml | 2 +- bundle/tests/environment_empty/databricks.yml | 5 -- bundle/tests/environment_empty_test.go | 12 --- bundle/tests/environment_git_test.go | 20 +++++ bundle/tests/environment_overrides_test.go | 8 +- .../environments_autoload_git/databricks.yml | 11 +++ .../databricks.yml | 44 +++++++++ .../environments_job_and_pipeline_test.go | 
56 ++++++++++++ .../databricks.yml | 35 ++++++++ .../environments_override_job_cluster_test.go | 29 ++++++ bundle/tests/git_test.go | 2 +- .../tests/interpolation_target/databricks.yml | 14 +++ bundle/tests/interpolation_test.go | 12 +++ bundle/tests/job_and_pipeline/databricks.yml | 2 +- bundle/tests/job_and_pipeline_test.go | 6 +- bundle/tests/loader.go | 4 +- .../tests/override_job_cluster/databricks.yml | 2 +- bundle/tests/override_job_cluster_test.go | 4 +- bundle/tests/target_empty/databricks.yml | 5 ++ bundle/tests/target_empty_test.go | 12 +++ .../target_overrides/resources/databricks.yml | 20 +++++ .../target_overrides/workspace/databricks.yml | 14 +++ bundle/tests/target_overrides_test.go | 27 ++++++ .../variables/env_overrides/databricks.yml | 2 +- bundle/tests/variables_test.go | 20 ++--- cmd/bundle/variables.go | 2 +- cmd/configure/configure.go | 2 +- cmd/root/bundle.go | 48 +++++++--- cmd/root/bundle_test.go | 24 +++++ cmd/root/root.go | 1 + cmd/sync/sync_test.go | 2 +- 63 files changed, 768 insertions(+), 416 deletions(-) delete mode 100644 bundle/config/mutator/default_environment.go create mode 100644 bundle/config/mutator/default_target.go rename bundle/config/mutator/{default_environment_test.go => default_target_test.go} (51%) rename bundle/config/mutator/{process_environment_mode.go => process_target_mode.go} (89%) rename bundle/config/mutator/{process_environment_mode_test.go => process_target_mode_test.go} (90%) delete mode 100644 bundle/config/mutator/select_default_environment.go delete mode 100644 bundle/config/mutator/select_default_environment_test.go create mode 100644 bundle/config/mutator/select_default_target.go create mode 100644 bundle/config/mutator/select_default_target_test.go delete mode 100644 bundle/config/mutator/select_environment.go create mode 100644 bundle/config/mutator/select_target.go rename bundle/config/mutator/{select_environment_test.go => select_target_test.go} (62%) rename bundle/config/{environment.go => 
target.go} (80%) delete mode 100644 bundle/tests/environment_empty/databricks.yml delete mode 100644 bundle/tests/environment_empty_test.go create mode 100644 bundle/tests/environment_git_test.go create mode 100644 bundle/tests/environments_autoload_git/databricks.yml create mode 100644 bundle/tests/environments_job_and_pipeline/databricks.yml create mode 100644 bundle/tests/environments_job_and_pipeline_test.go create mode 100644 bundle/tests/environments_override_job_cluster/databricks.yml create mode 100644 bundle/tests/environments_override_job_cluster_test.go create mode 100644 bundle/tests/interpolation_target/databricks.yml create mode 100644 bundle/tests/target_empty/databricks.yml create mode 100644 bundle/tests/target_empty_test.go create mode 100644 bundle/tests/target_overrides/resources/databricks.yml create mode 100644 bundle/tests/target_overrides/workspace/databricks.yml create mode 100644 bundle/tests/target_overrides_test.go diff --git a/bundle/bundle.go b/bundle/bundle.go index 06c68fe8a..a5eaa2897 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -117,10 +117,10 @@ func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient { } // CacheDir returns directory to use for temporary files for this bundle. -// Scoped to the bundle's environment. +// Scoped to the bundle's target. func (b *Bundle) CacheDir(paths ...string) (string, error) { - if b.Config.Bundle.Environment == "" { - panic("environment not set") + if b.Config.Bundle.Target == "" { + panic("target not set") } cacheDirName, exists := os.LookupEnv("DATABRICKS_BUNDLE_TMP") @@ -138,8 +138,8 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) { // Fixed components of the result path. parts := []string{ cacheDirName, - // Scope with environment name. - b.Config.Bundle.Environment, + // Scope with target name. + b.Config.Bundle.Target, } // Append dynamic components of the result path. 
diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index ac9475000..4a3e7f2c9 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -31,16 +31,16 @@ func TestBundleCacheDir(t *testing.T) { bundle, err := Load(context.Background(), projectDir) require.NoError(t, err) - // Artificially set environment. - // This is otherwise done by [mutators.SelectEnvironment]. - bundle.Config.Bundle.Environment = "default" + // Artificially set target. + // This is otherwise done by [mutators.SelectTarget]. + bundle.Config.Bundle.Target = "default" // unset env variable in case it's set t.Setenv("DATABRICKS_BUNDLE_TMP", "") cacheDir, err := bundle.CacheDir() - // format is /.databricks/bundle/ + // format is /.databricks/bundle/ assert.NoError(t, err) assert.Equal(t, filepath.Join(projectDir, ".databricks", "bundle", "default"), cacheDir) } @@ -55,16 +55,16 @@ func TestBundleCacheDirOverride(t *testing.T) { bundle, err := Load(context.Background(), projectDir) require.NoError(t, err) - // Artificially set environment. - // This is otherwise done by [mutators.SelectEnvironment]. - bundle.Config.Bundle.Environment = "default" + // Artificially set target. + // This is otherwise done by [mutators.SelectTarget]. + bundle.Config.Bundle.Target = "default" // now we expect to use 'bundleTmpDir' instead of CWD/.databricks/bundle t.Setenv("DATABRICKS_BUNDLE_TMP", bundleTmpDir) cacheDir, err := bundle.CacheDir() - // format is / + // format is / assert.NoError(t, err) assert.Equal(t, filepath.Join(bundleTmpDir, "default"), cacheDir) } diff --git a/bundle/config/bundle.go b/bundle/config/bundle.go index f3401477f..d444f5077 100644 --- a/bundle/config/bundle.go +++ b/bundle/config/bundle.go @@ -15,7 +15,10 @@ type Bundle struct { // Default warehouse to run SQL on. // DefaultWarehouse string `json:"default_warehouse,omitempty"` - // Environment is set by the mutator that selects the environment. + // Target is set by the mutator that selects the target. 
+ Target string `json:"target,omitempty" bundle:"readonly"` + + // DEPRECATED. Left for backward compatibility with Target Environment string `json:"environment,omitempty" bundle:"readonly"` // Terraform holds configuration related to Terraform. @@ -32,10 +35,10 @@ type Bundle struct { // origin url. Automatically loaded by reading .git directory if not specified Git Git `json:"git,omitempty"` - // Determines the mode of the environment. + // Determines the mode of the target. // For example, 'mode: development' can be used for deployments for // development purposes. - // Annotated readonly as this should be set at the environment level. + // Annotated readonly as this should be set at the target level. Mode Mode `json:"mode,omitempty" bundle:"readonly"` // Overrides the compute used for jobs and other supported assets. diff --git a/bundle/config/mutator/default_environment.go b/bundle/config/mutator/default_environment.go deleted file mode 100644 index 1598a647d..000000000 --- a/bundle/config/mutator/default_environment.go +++ /dev/null @@ -1,37 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" -) - -type defineDefaultEnvironment struct { - name string -} - -// DefineDefaultEnvironment adds an environment named "default" -// to the configuration if none have been defined. -func DefineDefaultEnvironment() bundle.Mutator { - return &defineDefaultEnvironment{ - name: "default", - } -} - -func (m *defineDefaultEnvironment) Name() string { - return fmt.Sprintf("DefineDefaultEnvironment(%s)", m.name) -} - -func (m *defineDefaultEnvironment) Apply(_ context.Context, b *bundle.Bundle) error { - // Nothing to do if the configuration has at least 1 environment. - if len(b.Config.Environments) > 0 { - return nil - } - - // Define default environment. 
- b.Config.Environments = make(map[string]*config.Environment) - b.Config.Environments[m.name] = &config.Environment{} - return nil -} diff --git a/bundle/config/mutator/default_target.go b/bundle/config/mutator/default_target.go new file mode 100644 index 000000000..d5318a3e2 --- /dev/null +++ b/bundle/config/mutator/default_target.go @@ -0,0 +1,37 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" +) + +type defineDefaultTarget struct { + name string +} + +// DefineDefaultTarget adds a target named "default" +// to the configuration if none have been defined. +func DefineDefaultTarget() bundle.Mutator { + return &defineDefaultTarget{ + name: "default", + } +} + +func (m *defineDefaultTarget) Name() string { + return fmt.Sprintf("DefineDefaultTarget(%s)", m.name) +} + +func (m *defineDefaultTarget) Apply(_ context.Context, b *bundle.Bundle) error { + // Nothing to do if the configuration has at least 1 target. + if len(b.Config.Targets) > 0 { + return nil + } + + // Define default target. 
+ b.Config.Targets = make(map[string]*config.Target) + b.Config.Targets[m.name] = &config.Target{} + return nil +} diff --git a/bundle/config/mutator/default_environment_test.go b/bundle/config/mutator/default_target_test.go similarity index 51% rename from bundle/config/mutator/default_environment_test.go rename to bundle/config/mutator/default_target_test.go index f196e5bae..49fbe6de2 100644 --- a/bundle/config/mutator/default_environment_test.go +++ b/bundle/config/mutator/default_target_test.go @@ -11,25 +11,25 @@ import ( "github.com/stretchr/testify/require" ) -func TestDefaultEnvironment(t *testing.T) { +func TestDefaultTarget(t *testing.T) { bundle := &bundle.Bundle{} - err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle) + err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) require.NoError(t, err) - env, ok := bundle.Config.Environments["default"] + env, ok := bundle.Config.Targets["default"] assert.True(t, ok) - assert.Equal(t, &config.Environment{}, env) + assert.Equal(t, &config.Target{}, env) } -func TestDefaultEnvironmentAlreadySpecified(t *testing.T) { +func TestDefaultTargetAlreadySpecified(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "development": {}, }, }, } - err := mutator.DefineDefaultEnvironment().Apply(context.Background(), bundle) + err := mutator.DefineDefaultTarget().Apply(context.Background(), bundle) require.NoError(t, err) - _, ok := bundle.Config.Environments["default"] + _, ok := bundle.Config.Targets["default"] assert.False(t, ok) } diff --git a/bundle/config/mutator/default_workspace_root.go b/bundle/config/mutator/default_workspace_root.go index bf51eda9e..260a59584 100644 --- a/bundle/config/mutator/default_workspace_root.go +++ b/bundle/config/mutator/default_workspace_root.go @@ -27,14 +27,14 @@ func (m *defineDefaultWorkspaceRoot) Apply(ctx context.Context, b *bundle.Bundle 
return fmt.Errorf("unable to define default workspace root: bundle name not defined") } - if b.Config.Bundle.Environment == "" { - return fmt.Errorf("unable to define default workspace root: bundle environment not selected") + if b.Config.Bundle.Target == "" { + return fmt.Errorf("unable to define default workspace root: bundle target not selected") } b.Config.Workspace.RootPath = fmt.Sprintf( "~/.bundle/%s/%s", b.Config.Bundle.Name, - b.Config.Bundle.Environment, + b.Config.Bundle.Target, ) return nil } diff --git a/bundle/config/mutator/default_workspace_root_test.go b/bundle/config/mutator/default_workspace_root_test.go index 4a78e6e5c..1822dca0f 100644 --- a/bundle/config/mutator/default_workspace_root_test.go +++ b/bundle/config/mutator/default_workspace_root_test.go @@ -15,8 +15,8 @@ func TestDefaultWorkspaceRoot(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ - Name: "name", - Environment: "environment", + Name: "name", + Target: "environment", }, }, } diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index 058258c87..ff1f96f50 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -7,11 +7,11 @@ import ( func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ ProcessRootIncludes(), - DefineDefaultEnvironment(), + DefineDefaultTarget(), LoadGitDetails(), } } -func DefaultMutatorsForEnvironment(env string) []bundle.Mutator { - return append(DefaultMutators(), SelectEnvironment(env)) +func DefaultMutatorsForTarget(env string) []bundle.Mutator { + return append(DefaultMutators(), SelectTarget(env)) } diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index ba3fd9940..124392491 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -35,7 +35,7 @@ func overrideJobCompute(j *resources.Job, compute string) { func (m *overrideCompute) Apply(ctx 
context.Context, b *bundle.Bundle) error { if b.Config.Bundle.Mode != config.Development { if b.Config.Bundle.ComputeID != "" { - return fmt.Errorf("cannot override compute for an environment that does not use 'mode: development'") + return fmt.Errorf("cannot override compute for a target that does not use 'mode: development'") return nil } } diff --git a/bundle/config/mutator/process_environment_mode.go b/bundle/config/mutator/process_target_mode.go similarity index 89% rename from bundle/config/mutator/process_environment_mode.go rename to bundle/config/mutator/process_target_mode.go index d20302347..b5dc25598 100644 --- a/bundle/config/mutator/process_environment_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -13,16 +13,16 @@ import ( "github.com/databricks/databricks-sdk-go/service/ml" ) -type processEnvironmentMode struct{} +type processTargetMode struct{} const developmentConcurrentRuns = 4 -func ProcessEnvironmentMode() bundle.Mutator { - return &processEnvironmentMode{} +func ProcessTargetMode() bundle.Mutator { + return &processTargetMode{} } -func (m *processEnvironmentMode) Name() string { - return "ProcessEnvironmentMode" +func (m *processTargetMode) Name() string { + return "ProcessTargetMode" } // Mark all resources as being for 'development' purposes, i.e. 
@@ -110,14 +110,14 @@ func findIncorrectPath(b *bundle.Bundle, mode config.Mode) string { func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { if b.Config.Bundle.Git.Inferred { - env := b.Config.Bundle.Environment - return fmt.Errorf("environment with 'mode: production' must specify an explicit 'environments.%s.git' configuration", env) + env := b.Config.Bundle.Target + return fmt.Errorf("target with 'mode: production' must specify an explicit 'targets.%s.git' configuration", env) } r := b.Config.Resources for i := range r.Pipelines { if r.Pipelines[i].Development { - return fmt.Errorf("environment with 'mode: production' cannot specify a pipeline with 'development: true'") + return fmt.Errorf("target with 'mode: production' cannot specify a pipeline with 'development: true'") } } @@ -125,7 +125,7 @@ func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUs if path := findIncorrectPath(b, config.Production); path != "" { message := "%s must not contain the current username when using 'mode: production'" if path == "root_path" { - return fmt.Errorf(message+"\n tip: set workspace.root_path to a shared path such as /Shared/.bundle/${bundle.name}/${bundle.environment}", path) + return fmt.Errorf(message+"\n tip: set workspace.root_path to a shared path such as /Shared/.bundle/${bundle.name}/${bundle.target}", path) } else { return fmt.Errorf(message, path) } @@ -165,7 +165,7 @@ func isRunAsSet(r config.Resources) bool { return true } -func (m *processEnvironmentMode) Apply(ctx context.Context, b *bundle.Bundle) error { +func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { switch b.Config.Bundle.Mode { case config.Development: err := validateDevelopmentMode(b) diff --git a/bundle/config/mutator/process_environment_mode_test.go b/bundle/config/mutator/process_target_mode_test.go similarity index 90% rename from bundle/config/mutator/process_environment_mode_test.go rename to 
bundle/config/mutator/process_target_mode_test.go index 36e0396e2..76db64dee 100644 --- a/bundle/config/mutator/process_environment_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -58,10 +58,10 @@ func mockBundle(mode config.Mode) *bundle.Bundle { } } -func TestProcessEnvironmentModeDevelopment(t *testing.T) { +func TestProcessTargetModeDevelopment(t *testing.T) { bundle := mockBundle(config.Development) - m := ProcessEnvironmentMode() + m := ProcessTargetMode() err := m.Apply(context.Background(), bundle) require.NoError(t, err) assert.Equal(t, "[dev lennart] job1", bundle.Config.Resources.Jobs["job1"].Name) @@ -73,10 +73,10 @@ func TestProcessEnvironmentModeDevelopment(t *testing.T) { assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } -func TestProcessEnvironmentModeDefault(t *testing.T) { +func TestProcessTargetModeDefault(t *testing.T) { bundle := mockBundle("") - m := ProcessEnvironmentMode() + m := ProcessTargetMode() err := m.Apply(context.Background(), bundle) require.NoError(t, err) assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) @@ -84,7 +84,7 @@ func TestProcessEnvironmentModeDefault(t *testing.T) { assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } -func TestProcessEnvironmentModeProduction(t *testing.T) { +func TestProcessTargetModeProduction(t *testing.T) { bundle := mockBundle(config.Production) err := validateProductionMode(context.Background(), bundle, false) @@ -118,7 +118,7 @@ func TestProcessEnvironmentModeProduction(t *testing.T) { assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } -func TestProcessEnvironmentModeProductionGit(t *testing.T) { +func TestProcessTargetModeProductionGit(t *testing.T) { bundle := mockBundle(config.Production) // Pretend the user didn't set Git configuration explicitly @@ -129,10 +129,10 @@ func TestProcessEnvironmentModeProductionGit(t *testing.T) { 
bundle.Config.Bundle.Git.Inferred = false } -func TestProcessEnvironmentModeProductionOkForPrincipal(t *testing.T) { +func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { bundle := mockBundle(config.Production) - // Our environment has all kinds of problems when not using service principals ... + // Our target has all kinds of problems when not using service principals ... err := validateProductionMode(context.Background(), bundle, false) require.Error(t, err) @@ -152,7 +152,7 @@ func TestAllResourcesMocked(t *testing.T) { assert.True( t, !field.IsNil() && field.Len() > 0, - "process_environment_mode should support '%s' (please add it to process_environment_mode.go and extend the test suite)", + "process_target_mode should support '%s' (please add it to process_target_mode.go and extend the test suite)", resources.Type().Field(i).Name, ) } @@ -164,7 +164,7 @@ func TestAllResourcesRenamed(t *testing.T) { bundle := mockBundle(config.Development) resources := reflect.ValueOf(bundle.Config.Resources) - m := ProcessEnvironmentMode() + m := ProcessTargetMode() err := m.Apply(context.Background(), bundle) require.NoError(t, err) @@ -179,7 +179,7 @@ func TestAllResourcesRenamed(t *testing.T) { assert.True( t, strings.Contains(nameField.String(), "dev"), - "process_environment_mode should rename '%s' in '%s'", + "process_target_mode should rename '%s' in '%s'", key, resources.Type().Field(i).Name, ) diff --git a/bundle/config/mutator/select_default_environment.go b/bundle/config/mutator/select_default_environment.go deleted file mode 100644 index 0ed1d2db9..000000000 --- a/bundle/config/mutator/select_default_environment.go +++ /dev/null @@ -1,54 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - "strings" - - "github.com/databricks/cli/bundle" - "golang.org/x/exp/maps" -) - -type selectDefaultEnvironment struct{} - -// SelectDefaultEnvironment merges the default environment into the root configuration. 
-func SelectDefaultEnvironment() bundle.Mutator { - return &selectDefaultEnvironment{} -} - -func (m *selectDefaultEnvironment) Name() string { - return "SelectDefaultEnvironment" -} - -func (m *selectDefaultEnvironment) Apply(ctx context.Context, b *bundle.Bundle) error { - if len(b.Config.Environments) == 0 { - return fmt.Errorf("no environments defined") - } - - // One environment means there's only one default. - names := maps.Keys(b.Config.Environments) - if len(names) == 1 { - return SelectEnvironment(names[0]).Apply(ctx, b) - } - - // Multiple environments means we look for the `default` flag. - var defaults []string - for name, env := range b.Config.Environments { - if env != nil && env.Default { - defaults = append(defaults, name) - } - } - - // It is invalid to have multiple environments with the `default` flag set. - if len(defaults) > 1 { - return fmt.Errorf("multiple environments are marked as default (%s)", strings.Join(defaults, ", ")) - } - - // If no environment has the `default` flag set, ask the user to specify one. - if len(defaults) == 0 { - return fmt.Errorf("please specify environment") - } - - // One default remaining. 
- return SelectEnvironment(defaults[0]).Apply(ctx, b) -} diff --git a/bundle/config/mutator/select_default_environment_test.go b/bundle/config/mutator/select_default_environment_test.go deleted file mode 100644 index cc8f9c01d..000000000 --- a/bundle/config/mutator/select_default_environment_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package mutator_test - -import ( - "context" - "testing" - - "github.com/databricks/cli/bundle" - "github.com/databricks/cli/bundle/config" - "github.com/databricks/cli/bundle/config/mutator" - "github.com/stretchr/testify/assert" -) - -func TestSelectDefaultEnvironmentNoEnvironments(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{}, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "no environments defined") -} - -func TestSelectDefaultEnvironmentSingleEnvironments(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.NoError(t, err) - assert.Equal(t, "foo", bundle.Config.Bundle.Environment) -} - -func TestSelectDefaultEnvironmentNoDefaults(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - "bar": {}, - "qux": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "please specify environment") -} - -func TestSelectDefaultEnvironmentNoDefaultsWithNil(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": nil, - "bar": nil, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "please specify environment") -} - -func 
TestSelectDefaultEnvironmentMultipleDefaults(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {Default: true}, - "bar": {Default: true}, - "qux": {Default: true}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.ErrorContains(t, err, "multiple environments are marked as default") -} - -func TestSelectDefaultEnvironmentSingleDefault(t *testing.T) { - bundle := &bundle.Bundle{ - Config: config.Root{ - Environments: map[string]*config.Environment{ - "foo": {}, - "bar": {Default: true}, - "qux": {}, - }, - }, - } - err := mutator.SelectDefaultEnvironment().Apply(context.Background(), bundle) - assert.NoError(t, err) - assert.Equal(t, "bar", bundle.Config.Bundle.Environment) -} diff --git a/bundle/config/mutator/select_default_target.go b/bundle/config/mutator/select_default_target.go new file mode 100644 index 000000000..8abcfe4ff --- /dev/null +++ b/bundle/config/mutator/select_default_target.go @@ -0,0 +1,54 @@ +package mutator + +import ( + "context" + "fmt" + "strings" + + "github.com/databricks/cli/bundle" + "golang.org/x/exp/maps" +) + +type selectDefaultTarget struct{} + +// SelectDefaultTarget merges the default target into the root configuration. +func SelectDefaultTarget() bundle.Mutator { + return &selectDefaultTarget{} +} + +func (m *selectDefaultTarget) Name() string { + return "SelectDefaultTarget" +} + +func (m *selectDefaultTarget) Apply(ctx context.Context, b *bundle.Bundle) error { + if len(b.Config.Targets) == 0 { + return fmt.Errorf("no targets defined") + } + + // One target means there's only one default. + names := maps.Keys(b.Config.Targets) + if len(names) == 1 { + return SelectTarget(names[0]).Apply(ctx, b) + } + + // Multiple targets means we look for the `default` flag. 
+ var defaults []string + for name, env := range b.Config.Targets { + if env != nil && env.Default { + defaults = append(defaults, name) + } + } + + // It is invalid to have multiple targets with the `default` flag set. + if len(defaults) > 1 { + return fmt.Errorf("multiple targets are marked as default (%s)", strings.Join(defaults, ", ")) + } + + // If no target has the `default` flag set, ask the user to specify one. + if len(defaults) == 0 { + return fmt.Errorf("please specify target") + } + + // One default remaining. + return SelectTarget(defaults[0]).Apply(ctx, b) +} diff --git a/bundle/config/mutator/select_default_target_test.go b/bundle/config/mutator/select_default_target_test.go new file mode 100644 index 000000000..5d7b93b28 --- /dev/null +++ b/bundle/config/mutator/select_default_target_test.go @@ -0,0 +1,90 @@ +package mutator_test + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/assert" +) + +func TestSelectDefaultTargetNoTargets(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{}, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "no targets defined") +} + +func TestSelectDefaultTargetSingleTargets(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.NoError(t, err) + assert.Equal(t, "foo", bundle.Config.Bundle.Target) +} + +func TestSelectDefaultTargetNoDefaults(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + "bar": {}, + "qux": {}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, 
"please specify target") +} + +func TestSelectDefaultTargetNoDefaultsWithNil(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": nil, + "bar": nil, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "please specify target") +} + +func TestSelectDefaultTargetMultipleDefaults(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {Default: true}, + "bar": {Default: true}, + "qux": {Default: true}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.ErrorContains(t, err, "multiple targets are marked as default") +} + +func TestSelectDefaultTargetSingleDefault(t *testing.T) { + bundle := &bundle.Bundle{ + Config: config.Root{ + Targets: map[string]*config.Target{ + "foo": {}, + "bar": {Default: true}, + "qux": {}, + }, + }, + } + err := mutator.SelectDefaultTarget().Apply(context.Background(), bundle) + assert.NoError(t, err) + assert.Equal(t, "bar", bundle.Config.Bundle.Target) +} diff --git a/bundle/config/mutator/select_environment.go b/bundle/config/mutator/select_environment.go deleted file mode 100644 index 6ced66e86..000000000 --- a/bundle/config/mutator/select_environment.go +++ /dev/null @@ -1,48 +0,0 @@ -package mutator - -import ( - "context" - "fmt" - - "github.com/databricks/cli/bundle" -) - -type selectEnvironment struct { - name string -} - -// SelectEnvironment merges the specified environment into the root configuration. 
-func SelectEnvironment(name string) bundle.Mutator { - return &selectEnvironment{ - name: name, - } -} - -func (m *selectEnvironment) Name() string { - return fmt.Sprintf("SelectEnvironment(%s)", m.name) -} - -func (m *selectEnvironment) Apply(_ context.Context, b *bundle.Bundle) error { - if b.Config.Environments == nil { - return fmt.Errorf("no environments defined") - } - - // Get specified environment - env, ok := b.Config.Environments[m.name] - if !ok { - return fmt.Errorf("%s: no such environment", m.name) - } - - // Merge specified environment into root configuration structure. - err := b.Config.MergeEnvironment(env) - if err != nil { - return err - } - - // Store specified environment in configuration for reference. - b.Config.Bundle.Environment = m.name - - // Clear environments after loading. - b.Config.Environments = nil - return nil -} diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go new file mode 100644 index 000000000..3be1f2e1a --- /dev/null +++ b/bundle/config/mutator/select_target.go @@ -0,0 +1,54 @@ +package mutator + +import ( + "context" + "fmt" + + "github.com/databricks/cli/bundle" +) + +type selectTarget struct { + name string +} + +// SelectTarget merges the specified target into the root configuration. +func SelectTarget(name string) bundle.Mutator { + return &selectTarget{ + name: name, + } +} + +func (m *selectTarget) Name() string { + return fmt.Sprintf("SelectTarget(%s)", m.name) +} + +func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { + if b.Config.Targets == nil { + return fmt.Errorf("no targets defined") + } + + // Get specified target + target, ok := b.Config.Targets[m.name] + if !ok { + return fmt.Errorf("%s: no such target", m.name) + } + + // Merge specified target into root configuration structure. + err := b.Config.MergeTargetOverrides(target) + if err != nil { + return err + } + + // Store specified target in configuration for reference. 
+ b.Config.Bundle.Target = m.name + + // We do this for backward compatibility. + // TODO: remove when Environments section is not supported anymore. + b.Config.Bundle.Environment = b.Config.Bundle.Target + + // Clear targets after loading. + b.Config.Targets = nil + b.Config.Environments = nil + + return nil +} diff --git a/bundle/config/mutator/select_environment_test.go b/bundle/config/mutator/select_target_test.go similarity index 62% rename from bundle/config/mutator/select_environment_test.go rename to bundle/config/mutator/select_target_test.go index 73b3a7893..dfcd8cb08 100644 --- a/bundle/config/mutator/select_environment_test.go +++ b/bundle/config/mutator/select_target_test.go @@ -11,13 +11,13 @@ import ( "github.com/stretchr/testify/require" ) -func TestSelectEnvironment(t *testing.T) { +func TestSelectTarget(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ Workspace: config.Workspace{ Host: "foo", }, - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "default": { Workspace: &config.Workspace{ Host: "bar", @@ -26,19 +26,19 @@ func TestSelectEnvironment(t *testing.T) { }, }, } - err := mutator.SelectEnvironment("default").Apply(context.Background(), bundle) + err := mutator.SelectTarget("default").Apply(context.Background(), bundle) require.NoError(t, err) assert.Equal(t, "bar", bundle.Config.Workspace.Host) } -func TestSelectEnvironmentNotFound(t *testing.T) { +func TestSelectTargetNotFound(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ - Environments: map[string]*config.Environment{ + Targets: map[string]*config.Target{ "default": {}, }, }, } - err := mutator.SelectEnvironment("doesnt-exist").Apply(context.Background(), bundle) - require.Error(t, err, "no environments defined") + err := mutator.SelectTarget("doesnt-exist").Apply(context.Background(), bundle) + require.Error(t, err, "no targets defined") } diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 
b15158b45..5d47b918c 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -115,7 +115,7 @@ func (r *Resources) SetConfigFilePath(path string) { } // MergeJobClusters iterates over all jobs and merges their job clusters. -// This is called after applying the environment overrides. +// This is called after applying the target overrides. func (r *Resources) MergeJobClusters() error { for _, job := range r.Jobs { if err := job.MergeJobClusters(); err != nil { diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index 327d7e13e..6200062a8 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -22,7 +22,7 @@ func (j *Job) MergeJobClusters() error { keys := make(map[string]*jobs.JobCluster) output := make([]jobs.JobCluster, 0, len(j.JobClusters)) - // Environment overrides are always appended, so we can iterate in natural order to + // Target overrides are always appended, so we can iterate in natural order to // first find the base definition, and merge instances we encounter later. for i := range j.JobClusters { key := j.JobClusters[i].JobClusterKey diff --git a/bundle/config/root.go b/bundle/config/root.go index b6d1efc96..24426dd89 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -69,11 +69,14 @@ type Root struct { // to deploy in this bundle (e.g. jobs, pipelines, etc.). Resources Resources `json:"resources,omitempty"` - // Environments can be used to differentiate settings and resources between - // bundle deployment environments (e.g. development, staging, production). + // Targets can be used to differentiate settings and resources between + // bundle deployment targets (e.g. development, staging, production). // If not specified, the code below initializes this field with a - // single default-initialized environment called "default". - Environments map[string]*Environment `json:"environments,omitempty"` + // single default-initialized target called "default". 
+ Targets map[string]*Target `json:"targets,omitempty"` + + // DEPRECATED. Left for backward compatibility with Targets + Environments map[string]*Target `json:"environments,omitempty"` } func Load(path string) (*Root, error) { @@ -103,8 +106,8 @@ func Load(path string) (*Root, error) { // was loaded from in configuration leafs that require it. func (r *Root) SetConfigFilePath(path string) { r.Resources.SetConfigFilePath(path) - if r.Environments != nil { - for _, env := range r.Environments { + if r.Targets != nil { + for _, env := range r.Targets { if env == nil { continue } @@ -148,6 +151,15 @@ func (r *Root) Load(path string) error { return fmt.Errorf("failed to load %s: %w", path, err) } + if r.Environments != nil && r.Targets != nil { + return fmt.Errorf("both 'environments' and 'targets' are specified, only 'targets' should be used: %s", path) + } + + if r.Environments != nil { + //TODO: add a command line notice that this is a deprecated option. + r.Targets = r.Environments + } + r.Path = filepath.Dir(path) r.SetConfigFilePath(path) @@ -169,37 +181,37 @@ func (r *Root) Merge(other *Root) error { return mergo.Merge(r, other, mergo.WithOverride) } -func (r *Root) MergeEnvironment(env *Environment) error { +func (r *Root) MergeTargetOverrides(target *Target) error { var err error - // Environment may be nil if it's empty. - if env == nil { + // Target may be nil if it's empty. 
+ if target == nil { return nil } - if env.Bundle != nil { - err = mergo.Merge(&r.Bundle, env.Bundle, mergo.WithOverride) + if target.Bundle != nil { + err = mergo.Merge(&r.Bundle, target.Bundle, mergo.WithOverride) if err != nil { return err } } - if env.Workspace != nil { - err = mergo.Merge(&r.Workspace, env.Workspace, mergo.WithOverride) + if target.Workspace != nil { + err = mergo.Merge(&r.Workspace, target.Workspace, mergo.WithOverride) if err != nil { return err } } - if env.Artifacts != nil { - err = mergo.Merge(&r.Artifacts, env.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) + if target.Artifacts != nil { + err = mergo.Merge(&r.Artifacts, target.Artifacts, mergo.WithOverride, mergo.WithAppendSlice) if err != nil { return err } } - if env.Resources != nil { - err = mergo.Merge(&r.Resources, env.Resources, mergo.WithOverride, mergo.WithAppendSlice) + if target.Resources != nil { + err = mergo.Merge(&r.Resources, target.Resources, mergo.WithOverride, mergo.WithAppendSlice) if err != nil { return err } @@ -210,8 +222,8 @@ func (r *Root) MergeEnvironment(env *Environment) error { } } - if env.Variables != nil { - for k, v := range env.Variables { + if target.Variables != nil { + for k, v := range target.Variables { variable, ok := r.Variables[k] if !ok { return fmt.Errorf("variable %s is not defined but is assigned a value", k) @@ -222,24 +234,24 @@ func (r *Root) MergeEnvironment(env *Environment) error { } } - if env.Mode != "" { - r.Bundle.Mode = env.Mode + if target.Mode != "" { + r.Bundle.Mode = target.Mode } - if env.ComputeID != "" { - r.Bundle.ComputeID = env.ComputeID + if target.ComputeID != "" { + r.Bundle.ComputeID = target.ComputeID } git := &r.Bundle.Git - if env.Git.Branch != "" { - git.Branch = env.Git.Branch + if target.Git.Branch != "" { + git.Branch = target.Git.Branch git.Inferred = false } - if env.Git.Commit != "" { - git.Commit = env.Git.Commit + if target.Git.Commit != "" { + git.Commit = target.Git.Commit } - if env.Git.OriginURL 
!= "" { - git.OriginURL = env.Git.OriginURL + if target.Git.OriginURL != "" { + git.OriginURL = target.Git.OriginURL } return nil diff --git a/bundle/config/root_test.go b/bundle/config/root_test.go index 531ffcec1..6e2636678 100644 --- a/bundle/config/root_test.go +++ b/bundle/config/root_test.go @@ -57,7 +57,7 @@ func TestRootMergeStruct(t *testing.T) { func TestRootMergeMap(t *testing.T) { root := &Root{ Path: "path", - Environments: map[string]*Environment{ + Targets: map[string]*Target{ "development": { Workspace: &Workspace{ Host: "foo", @@ -68,7 +68,7 @@ func TestRootMergeMap(t *testing.T) { } other := &Root{ Path: "path", - Environments: map[string]*Environment{ + Targets: map[string]*Target{ "development": { Workspace: &Workspace{ Host: "bar", @@ -77,7 +77,7 @@ func TestRootMergeMap(t *testing.T) { }, } assert.NoError(t, root.Merge(other)) - assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Environments["development"].Workspace) + assert.Equal(t, &Workspace{Host: "bar", Profile: "profile"}, root.Targets["development"].Workspace) } func TestDuplicateIdOnLoadReturnsError(t *testing.T) { @@ -159,12 +159,12 @@ func TestInitializeVariablesUndefinedVariables(t *testing.T) { assert.ErrorContains(t, err, "variable bar has not been defined") } -func TestRootMergeEnvironmentWithMode(t *testing.T) { +func TestRootMergeTargetOverridesWithMode(t *testing.T) { root := &Root{ Bundle: Bundle{}, } - env := &Environment{Mode: Development} - require.NoError(t, root.MergeEnvironment(env)) + env := &Target{Mode: Development} + require.NoError(t, root.MergeTargetOverrides(env)) assert.Equal(t, Development, root.Bundle.Mode) } diff --git a/bundle/config/environment.go b/bundle/config/target.go similarity index 80% rename from bundle/config/environment.go rename to bundle/config/target.go index 7152f791f..10775049d 100644 --- a/bundle/config/environment.go +++ b/bundle/config/target.go @@ -2,14 +2,14 @@ package config type Mode string -// Environment defines 
overrides for a single environment. +// Target defines overrides for a single target. // This structure is recursively merged into the root configuration. -type Environment struct { - // Default marks that this environment must be used if one isn't specified - // by the user (through environment variable or command line argument). +type Target struct { + // Default marks that this target must be used if one isn't specified + // by the user (through environment variable or command line argument). Default bool `json:"default,omitempty"` - // Determines the mode of the environment. + // Determines the mode of the target. // For example, 'mode: development' can be used for deployments for // development purposes. Mode Mode `json:"mode,omitempty"` @@ -27,7 +27,7 @@ type Environment struct { // Override default values for defined variables // Does not permit defining new variables or redefining existing ones - // in the scope of an environment + // in the scope of a target Variables map[string]string `json:"variables,omitempty"` Git Git `json:"git,omitempty"` diff --git a/bundle/config/variable/variable.go b/bundle/config/variable/variable.go index 132920bb9..73925d432 100644 --- a/bundle/config/variable/variable.go +++ b/bundle/config/variable/variable.go @@ -18,7 +18,7 @@ type Variable struct { // resolved in the following priority order (from highest to lowest) // // 1. Command line flag. For example: `--var="foo=bar"` - // 2. Environment variable. eg: BUNDLE_VAR_foo=bar + // 2. Environment variable. eg: BUNDLE_VAR_foo=bar // 3. Default value as defined in the applicable environments block // 4. Default value defined in variable definition // 5. 
Throw error, since if no default value is defined, then the variable diff --git a/bundle/config/workspace.go b/bundle/config/workspace.go index bd116a9cb..90cd59c6f 100644 --- a/bundle/config/workspace.go +++ b/bundle/config/workspace.go @@ -45,7 +45,7 @@ type Workspace struct { CurrentUser *User `json:"current_user,omitempty" bundle:"readonly"` // Remote workspace base path for deployment state, for artifacts, as synchronization target. - // This defaults to "~/.bundle/${bundle.name}/${bundle.environment}" where "~" expands to + // This defaults to "~/.bundle/${bundle.name}/${bundle.target}" where "~" expands to // the current user's home directory in the workspace (e.g. `/Users/jane@doe.com`). RootPath string `json:"root_path,omitempty"` diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 79e18170e..5bb5929e6 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -31,7 +31,7 @@ func TestInitEnvironmentVariables(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", Terraform: &config.Terraform{ ExecPath: "terraform", }, @@ -58,7 +58,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -86,7 +86,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -112,7 +112,7 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -142,7 +142,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: 
"whatever", + Target: "whatever", }, }, } @@ -172,7 +172,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -202,7 +202,7 @@ func TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } @@ -230,7 +230,7 @@ func TestSetProxyEnvVars(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", }, }, } diff --git a/bundle/deploy/terraform/load_test.go b/bundle/deploy/terraform/load_test.go index c235c08e8..1937ca8a2 100644 --- a/bundle/deploy/terraform/load_test.go +++ b/bundle/deploy/terraform/load_test.go @@ -20,7 +20,7 @@ func TestLoadWithNoState(t *testing.T) { Config: config.Root{ Path: t.TempDir(), Bundle: config.Bundle{ - Environment: "whatever", + Target: "whatever", Terraform: &config.Terraform{ ExecPath: "terraform", }, diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index fc5056f63..219ec26cf 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -26,7 +26,7 @@ func Initialize() bundle.Mutator { interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), ), mutator.OverrideCompute(), - mutator.ProcessEnvironmentMode(), + mutator.ProcessTargetMode(), mutator.TranslatePaths(), terraform.Initialize(), }, diff --git a/bundle/schema/README.md b/bundle/schema/README.md index 4df43cf23..fe6b149c1 100644 --- a/bundle/schema/README.md +++ b/bundle/schema/README.md @@ -3,7 +3,7 @@ `docs/bundle_descriptions.json` contains both autogenerated as well as manually written descriptions for the json schema. Specifically 1. `resources` : almost all descriptions are autogenerated from the OpenAPI spec -2. 
`environments` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`) +2. `targets` : almost all descriptions are copied over from root level entities (eg: `bundle`, `artifacts`) 3. `bundle` : manually editted 4. `include` : manually editted 5. `workspace` : manually editted @@ -17,7 +17,7 @@ These descriptions are rendered in the inline documentation in an IDE `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` 2. Manually edit bundle_descriptions.json to add your descriptions 3. Build again to embed the new `bundle_descriptions.json` into the binary (`go build`) -4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `environments` +4. Again run `databricks bundle schema --only-docs > ~/databricks/bundle/schema/docs/bundle_descriptions.json` to copy over any applicable descriptions to `targets` 5. push to repo diff --git a/bundle/schema/docs.go b/bundle/schema/docs.go index 5fcef4edd..4b2fd36ae 100644 --- a/bundle/schema/docs.go +++ b/bundle/schema/docs.go @@ -52,20 +52,20 @@ func BundleDocs(openapiSpecPath string) (*Docs, error) { } docs.Properties["resources"] = schemaToDocs(resourceSchema) } - docs.refreshEnvironmentsDocs() + docs.refreshTargetsDocs() return docs, nil } -func (docs *Docs) refreshEnvironmentsDocs() error { - environmentsDocs, ok := docs.Properties["environments"] - if !ok || environmentsDocs.AdditionalProperties == nil || - environmentsDocs.AdditionalProperties.Properties == nil { - return fmt.Errorf("invalid environments descriptions") +func (docs *Docs) refreshTargetsDocs() error { + targetsDocs, ok := docs.Properties["targets"] + if !ok || targetsDocs.AdditionalProperties == nil || + targetsDocs.AdditionalProperties.Properties == nil { + return fmt.Errorf("invalid targets descriptions") } - environmentProperties := 
environmentsDocs.AdditionalProperties.Properties + targetProperties := targetsDocs.AdditionalProperties.Properties propertiesToCopy := []string{"artifacts", "bundle", "resources", "workspace"} for _, p := range propertiesToCopy { - environmentProperties[p] = docs.Properties[p] + targetProperties[p] = docs.Properties[p] } return nil } diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 2adb11f21..84f0492fb 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -36,7 +36,7 @@ } } }, - "environments": { + "targets": { "description": "", "additionalproperties": { "description": "", @@ -1827,7 +1827,7 @@ "description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg." }, "root_path": { - "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`" + "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`" }, "state_path": { "description": "The remote path to synchronize bundle state to. This defaults to `${workspace.root}/state`" @@ -3591,7 +3591,7 @@ "description": "Connection profile to use. By default profiles are specified in ~/.databrickscfg." }, "root_path": { - "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.environment}`" + "description": "The base location for synchronizing files, artifacts and state. Defaults to `/Users/jane@doe.com/.bundle/${bundle.name}/${bundle.target}`" }, "state_path": { "description": "The remote path to synchronize bundle state to. 
This defaults to `${workspace.root}/state`" diff --git a/bundle/tests/autoload_git/databricks.yml b/bundle/tests/autoload_git/databricks.yml index ba4785aed..92ab8d66a 100644 --- a/bundle/tests/autoload_git/databricks.yml +++ b/bundle/tests/autoload_git/databricks.yml @@ -1,7 +1,7 @@ bundle: name: autoload git config test -environments: +targets: development: default: true diff --git a/bundle/tests/environment_empty/databricks.yml b/bundle/tests/environment_empty/databricks.yml deleted file mode 100644 index 17c03c8dc..000000000 --- a/bundle/tests/environment_empty/databricks.yml +++ /dev/null @@ -1,5 +0,0 @@ -bundle: - name: environment_empty - -environments: - development: diff --git a/bundle/tests/environment_empty_test.go b/bundle/tests/environment_empty_test.go deleted file mode 100644 index fb2e33416..000000000 --- a/bundle/tests/environment_empty_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package config_tests - -import ( - "testing" - - "github.com/stretchr/testify/assert" -) - -func TestEnvironmentEmpty(t *testing.T) { - b := loadEnvironment(t, "./environment_empty", "development") - assert.Equal(t, "development", b.Config.Bundle.Environment) -} diff --git a/bundle/tests/environment_git_test.go b/bundle/tests/environment_git_test.go new file mode 100644 index 000000000..bb10825e4 --- /dev/null +++ b/bundle/tests/environment_git_test.go @@ -0,0 +1,20 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestGitAutoLoadWithEnvironment(t *testing.T) { + b := load(t, "./environments_autoload_git") + assert.True(t, b.Config.Bundle.Git.Inferred) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} + +func TestGitManuallySetBranchWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_autoload_git", "production") + assert.False(t, b.Config.Bundle.Git.Inferred) + assert.Equal(t, "main", b.Config.Bundle.Git.Branch) + assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") +} diff --git 
a/bundle/tests/environment_overrides_test.go b/bundle/tests/environment_overrides_test.go index 0a3f9fcd8..91dc2c811 100644 --- a/bundle/tests/environment_overrides_test.go +++ b/bundle/tests/environment_overrides_test.go @@ -7,17 +7,17 @@ import ( ) func TestEnvironmentOverridesWorkspaceDev(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides/workspace", "development") + b := loadTarget(t, "./environment_overrides/workspace", "development") assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host) } func TestEnvironmentOverridesWorkspaceStaging(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides/workspace", "staging") + b := loadTarget(t, "./environment_overrides/workspace", "staging") assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host) } func TestEnvironmentOverridesResourcesDev(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides/resources", "development") + b := loadTarget(t, "./environment_overrides/resources", "development") assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) // Base values are preserved in the development environment. @@ -26,7 +26,7 @@ func TestEnvironmentOverridesResourcesDev(t *testing.T) { } func TestEnvironmentOverridesResourcesStaging(t *testing.T) { - b := loadEnvironment(t, "./environment_overrides/resources", "staging") + b := loadTarget(t, "./environment_overrides/resources", "staging") assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) // Overrides are only applied if they are not zero-valued. 
diff --git a/bundle/tests/environments_autoload_git/databricks.yml b/bundle/tests/environments_autoload_git/databricks.yml new file mode 100644 index 000000000..ba4785aed --- /dev/null +++ b/bundle/tests/environments_autoload_git/databricks.yml @@ -0,0 +1,11 @@ +bundle: + name: autoload git config test + +environments: + development: + default: true + + production: + # production can only be deployed from the 'main' branch + git: + branch: main diff --git a/bundle/tests/environments_job_and_pipeline/databricks.yml b/bundle/tests/environments_job_and_pipeline/databricks.yml new file mode 100644 index 000000000..e29fa0349 --- /dev/null +++ b/bundle/tests/environments_job_and_pipeline/databricks.yml @@ -0,0 +1,44 @@ +resources: + pipelines: + nyc_taxi_pipeline: + name: "nyc taxi loader" + libraries: + - notebook: + path: ./dlt/nyc_taxi_loader + +environments: + development: + mode: development + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_development + development: true + + staging: + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_staging + development: false + + production: + mode: production + resources: + pipelines: + nyc_taxi_pipeline: + target: nyc_taxi_production + development: false + photon: true + + jobs: + pipeline_schedule: + name: Daily refresh of production pipeline + + schedule: + quartz_cron_expression: 6 6 11 * * ? 
+ timezone_id: UTC + + tasks: + - pipeline_task: + pipeline_id: "to be interpolated" diff --git a/bundle/tests/environments_job_and_pipeline_test.go b/bundle/tests/environments_job_and_pipeline_test.go new file mode 100644 index 000000000..a18daf90c --- /dev/null +++ b/bundle/tests/environments_job_and_pipeline_test.go @@ -0,0 +1,56 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestJobAndPipelineDevelopmentWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "development") + assert.Len(t, b.Config.Resources.Jobs, 0) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, b.Config.Bundle.Mode, config.Development) + assert.True(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_development", p.Target) +} + +func TestJobAndPipelineStagingWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "staging") + assert.Len(t, b.Config.Resources.Jobs, 0) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.False(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_staging", p.Target) +} + +func TestJobAndPipelineProductionWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_job_and_pipeline", "production") + assert.Len(t, b.Config.Resources.Jobs, 1) + assert.Len(t, b.Config.Resources.Pipelines, 1) + + p := 
b.Config.Resources.Pipelines["nyc_taxi_pipeline"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.False(t, p.Development) + require.Len(t, p.Libraries, 1) + assert.Equal(t, "./dlt/nyc_taxi_loader", p.Libraries[0].Notebook.Path) + assert.Equal(t, "nyc_taxi_production", p.Target) + + j := b.Config.Resources.Jobs["pipeline_schedule"] + assert.Equal(t, "environments_job_and_pipeline/databricks.yml", filepath.ToSlash(j.ConfigFilePath)) + assert.Equal(t, "Daily refresh of production pipeline", j.Name) + require.Len(t, j.Tasks, 1) + assert.NotEmpty(t, j.Tasks[0].PipelineTask.PipelineId) +} diff --git a/bundle/tests/environments_override_job_cluster/databricks.yml b/bundle/tests/environments_override_job_cluster/databricks.yml new file mode 100644 index 000000000..33061b2e3 --- /dev/null +++ b/bundle/tests/environments_override_job_cluster/databricks.yml @@ -0,0 +1,35 @@ +bundle: + name: override_job_cluster + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + foo: + name: job + job_clusters: + - job_cluster_key: key + new_cluster: + spark_version: 13.3.x-scala2.12 + +environments: + development: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.xlarge + num_workers: 1 + + staging: + resources: + jobs: + foo: + job_clusters: + - job_cluster_key: key + new_cluster: + node_type_id: i3.2xlarge + num_workers: 4 diff --git a/bundle/tests/environments_override_job_cluster_test.go b/bundle/tests/environments_override_job_cluster_test.go new file mode 100644 index 000000000..b3ec74453 --- /dev/null +++ b/bundle/tests/environments_override_job_cluster_test.go @@ -0,0 +1,29 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOverrideJobClusterDevWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_override_job_cluster", "development") + assert.Equal(t, "job", 
b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 1, c.NewCluster.NumWorkers) +} + +func TestOverrideJobClusterStagingWithEnvironment(t *testing.T) { + b := loadTarget(t, "./environments_override_job_cluster", "staging") + assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) + assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) + + c := b.Config.Resources.Jobs["foo"].JobClusters[0] + assert.Equal(t, "13.3.x-scala2.12", c.NewCluster.SparkVersion) + assert.Equal(t, "i3.2xlarge", c.NewCluster.NodeTypeId) + assert.Equal(t, 4, c.NewCluster.NumWorkers) +} diff --git a/bundle/tests/git_test.go b/bundle/tests/git_test.go index daab4d30a..c5ae83a20 100644 --- a/bundle/tests/git_test.go +++ b/bundle/tests/git_test.go @@ -17,7 +17,7 @@ func TestGitAutoLoad(t *testing.T) { } func TestGitManuallySetBranch(t *testing.T) { - b := loadEnvironment(t, "./autoload_git", "production") + b := loadTarget(t, "./autoload_git", "production") assert.False(t, b.Config.Bundle.Git.Inferred) assert.Equal(t, "main", b.Config.Bundle.Git.Branch) assert.Contains(t, b.Config.Bundle.Git.OriginURL, "/cli") diff --git a/bundle/tests/interpolation_target/databricks.yml b/bundle/tests/interpolation_target/databricks.yml new file mode 100644 index 000000000..ad4ebe199 --- /dev/null +++ b/bundle/tests/interpolation_target/databricks.yml @@ -0,0 +1,14 @@ +bundle: + name: foo ${workspace.profile} + +workspace: + profile: bar + +targets: + development: + default: true + +resources: + jobs: + my_job: + name: "${bundle.name} | ${workspace.profile} | ${bundle.environment} | ${bundle.target}" diff --git a/bundle/tests/interpolation_test.go b/bundle/tests/interpolation_test.go index 47b0c775f..837891a07 100644 --- a/bundle/tests/interpolation_test.go +++ 
b/bundle/tests/interpolation_test.go @@ -20,3 +20,15 @@ func TestInterpolation(t *testing.T) { assert.Equal(t, "foo bar", b.Config.Bundle.Name) assert.Equal(t, "foo bar | bar", b.Config.Resources.Jobs["my_job"].Name) } + +func TestInterpolationWithTarget(t *testing.T) { + b := loadTarget(t, "./interpolation_target", "development") + err := bundle.Apply(context.Background(), b, interpolation.Interpolate( + interpolation.IncludeLookupsInPath("bundle"), + interpolation.IncludeLookupsInPath("workspace"), + )) + require.NoError(t, err) + assert.Equal(t, "foo bar", b.Config.Bundle.Name) + assert.Equal(t, "foo bar | bar | development | development", b.Config.Resources.Jobs["my_job"].Name) + +} diff --git a/bundle/tests/job_and_pipeline/databricks.yml b/bundle/tests/job_and_pipeline/databricks.yml index e29fa0349..67d306ffe 100644 --- a/bundle/tests/job_and_pipeline/databricks.yml +++ b/bundle/tests/job_and_pipeline/databricks.yml @@ -6,7 +6,7 @@ resources: - notebook: path: ./dlt/nyc_taxi_loader -environments: +targets: development: mode: development resources: diff --git a/bundle/tests/job_and_pipeline_test.go b/bundle/tests/job_and_pipeline_test.go index d92eabd3b..5e8febc33 100644 --- a/bundle/tests/job_and_pipeline_test.go +++ b/bundle/tests/job_and_pipeline_test.go @@ -10,7 +10,7 @@ import ( ) func TestJobAndPipelineDevelopment(t *testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "development") + b := loadTarget(t, "./job_and_pipeline", "development") assert.Len(t, b.Config.Resources.Jobs, 0) assert.Len(t, b.Config.Resources.Pipelines, 1) @@ -24,7 +24,7 @@ func TestJobAndPipelineDevelopment(t *testing.T) { } func TestJobAndPipelineStaging(t *testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "staging") + b := loadTarget(t, "./job_and_pipeline", "staging") assert.Len(t, b.Config.Resources.Jobs, 0) assert.Len(t, b.Config.Resources.Pipelines, 1) @@ -37,7 +37,7 @@ func TestJobAndPipelineStaging(t *testing.T) { } func TestJobAndPipelineProduction(t 
*testing.T) { - b := loadEnvironment(t, "./job_and_pipeline", "production") + b := loadTarget(t, "./job_and_pipeline", "production") assert.Len(t, b.Config.Resources.Jobs, 1) assert.Len(t, b.Config.Resources.Pipelines, 1) diff --git a/bundle/tests/loader.go b/bundle/tests/loader.go index 056a82d91..f23b10764 100644 --- a/bundle/tests/loader.go +++ b/bundle/tests/loader.go @@ -18,9 +18,9 @@ func load(t *testing.T, path string) *bundle.Bundle { return b } -func loadEnvironment(t *testing.T, path, env string) *bundle.Bundle { +func loadTarget(t *testing.T, path, env string) *bundle.Bundle { b := load(t, path) - err := bundle.Apply(context.Background(), b, mutator.SelectEnvironment(env)) + err := bundle.Apply(context.Background(), b, mutator.SelectTarget(env)) require.NoError(t, err) return b } diff --git a/bundle/tests/override_job_cluster/databricks.yml b/bundle/tests/override_job_cluster/databricks.yml index 33061b2e3..a85b3b711 100644 --- a/bundle/tests/override_job_cluster/databricks.yml +++ b/bundle/tests/override_job_cluster/databricks.yml @@ -13,7 +13,7 @@ resources: new_cluster: spark_version: 13.3.x-scala2.12 -environments: +targets: development: resources: jobs: diff --git a/bundle/tests/override_job_cluster_test.go b/bundle/tests/override_job_cluster_test.go index 97f7c04ee..1393e03e5 100644 --- a/bundle/tests/override_job_cluster_test.go +++ b/bundle/tests/override_job_cluster_test.go @@ -7,7 +7,7 @@ import ( ) func TestOverrideJobClusterDev(t *testing.T) { - b := loadEnvironment(t, "./override_job_cluster", "development") + b := loadTarget(t, "./override_job_cluster", "development") assert.Equal(t, "job", b.Config.Resources.Jobs["foo"].Name) assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) @@ -18,7 +18,7 @@ func TestOverrideJobClusterDev(t *testing.T) { } func TestOverrideJobClusterStaging(t *testing.T) { - b := loadEnvironment(t, "./override_job_cluster", "staging") + b := loadTarget(t, "./override_job_cluster", "staging") assert.Equal(t, 
"job", b.Config.Resources.Jobs["foo"].Name) assert.Len(t, b.Config.Resources.Jobs["foo"].JobClusters, 1) diff --git a/bundle/tests/target_empty/databricks.yml b/bundle/tests/target_empty/databricks.yml new file mode 100644 index 000000000..cd415377c --- /dev/null +++ b/bundle/tests/target_empty/databricks.yml @@ -0,0 +1,5 @@ +bundle: + name: target_empty + +targets: + development: diff --git a/bundle/tests/target_empty_test.go b/bundle/tests/target_empty_test.go new file mode 100644 index 000000000..88705d8bb --- /dev/null +++ b/bundle/tests/target_empty_test.go @@ -0,0 +1,12 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTargetEmpty(t *testing.T) { + b := loadTarget(t, "./target_empty", "development") + assert.Equal(t, "development", b.Config.Bundle.Target) +} diff --git a/bundle/tests/target_overrides/resources/databricks.yml b/bundle/tests/target_overrides/resources/databricks.yml new file mode 100644 index 000000000..f6e2a7edb --- /dev/null +++ b/bundle/tests/target_overrides/resources/databricks.yml @@ -0,0 +1,20 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +resources: + jobs: + job1: + name: "base job" + +targets: + development: + default: true + + staging: + resources: + jobs: + job1: + name: "staging job" diff --git a/bundle/tests/target_overrides/workspace/databricks.yml b/bundle/tests/target_overrides/workspace/databricks.yml new file mode 100644 index 000000000..8c4f9487e --- /dev/null +++ b/bundle/tests/target_overrides/workspace/databricks.yml @@ -0,0 +1,14 @@ +bundle: + name: environment_overrides + +workspace: + host: https://acme.cloud.databricks.com/ + +targets: + development: + workspace: + host: https://development.acme.cloud.databricks.com/ + + staging: + workspace: + host: https://staging.acme.cloud.databricks.com/ diff --git a/bundle/tests/target_overrides_test.go b/bundle/tests/target_overrides_test.go new file mode 100644 index 
000000000..2516ce2a3 --- /dev/null +++ b/bundle/tests/target_overrides_test.go @@ -0,0 +1,27 @@ +package config_tests + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestTargetOverridesWorkspaceDev(t *testing.T) { + b := loadTarget(t, "./target_overrides/workspace", "development") + assert.Equal(t, "https://development.acme.cloud.databricks.com/", b.Config.Workspace.Host) +} + +func TestTargetOverridesWorkspaceStaging(t *testing.T) { + b := loadTarget(t, "./target_overrides/workspace", "staging") + assert.Equal(t, "https://staging.acme.cloud.databricks.com/", b.Config.Workspace.Host) +} + +func TestTargetOverridesResourcesDev(t *testing.T) { + b := loadTarget(t, "./target_overrides/resources", "development") + assert.Equal(t, "base job", b.Config.Resources.Jobs["job1"].Name) +} + +func TestTargetOverridesResourcesStaging(t *testing.T) { + b := loadTarget(t, "./target_overrides/resources", "staging") + assert.Equal(t, "staging job", b.Config.Resources.Jobs["job1"].Name) +} diff --git a/bundle/tests/variables/env_overrides/databricks.yml b/bundle/tests/variables/env_overrides/databricks.yml index 1fec10733..2157596c3 100644 --- a/bundle/tests/variables/env_overrides/databricks.yml +++ b/bundle/tests/variables/env_overrides/databricks.yml @@ -12,7 +12,7 @@ bundle: workspace: profile: ${var.a} ${var.b} -environments: +targets: env-with-single-variable-override: variables: b: dev-b diff --git a/bundle/tests/variables_test.go b/bundle/tests/variables_test.go index 365ffbd4b..93c822505 100644 --- a/bundle/tests/variables_test.go +++ b/bundle/tests/variables_test.go @@ -34,10 +34,10 @@ func TestVariablesLoadingFailsWhenRequiredVariableIsNotSpecified(t *testing.T) { assert.ErrorContains(t, err, "no value assigned to required variable b. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } -func TestVariablesEnvironmentsBlockOverride(t *testing.T) { +func TestVariablesTargetsBlockOverride(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-single-variable-override"), + mutator.SelectTarget("env-with-single-variable-override"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -46,10 +46,10 @@ func TestVariablesEnvironmentsBlockOverride(t *testing.T) { assert.Equal(t, "default-a dev-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideForMultipleVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-two-variable-overrides"), + mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -58,11 +58,11 @@ func TestVariablesEnvironmentsBlockOverrideForMultipleVariables(t *testing.T) { assert.Equal(t, "prod-a prod-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithProcessEnvVars(t *testing.T) { t.Setenv("BUNDLE_VAR_b", "env-var-b") b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-with-two-variable-overrides"), + mutator.SelectTarget("env-with-two-variable-overrides"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -71,10 +71,10 @@ func TestVariablesEnvironmentsBlockOverrideWithProcessEnvVars(t 
*testing.T) { assert.Equal(t, "prod-a env-var-b", b.Config.Workspace.Profile) } -func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithMissingVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-missing-a-required-variable-assignment"), + mutator.SelectTarget("env-missing-a-required-variable-assignment"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), @@ -82,10 +82,10 @@ func TestVariablesEnvironmentsBlockOverrideWithMissingVariables(t *testing.T) { assert.ErrorContains(t, err, "no value assigned to required variable b. Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_b environment variable") } -func TestVariablesEnvironmentsBlockOverrideWithUndefinedVariables(t *testing.T) { +func TestVariablesTargetsBlockOverrideWithUndefinedVariables(t *testing.T) { b := load(t, "./variables/env_overrides") err := bundle.Apply(context.Background(), b, bundle.Seq( - mutator.SelectEnvironment("env-using-an-undefined-variable"), + mutator.SelectTarget("env-using-an-undefined-variable"), mutator.SetVariables(), interpolation.Interpolate( interpolation.IncludeLookupsInPath(variable.VariableReferencePrefix), diff --git a/cmd/bundle/variables.go b/cmd/bundle/variables.go index 33f557cc1..c3e4af645 100644 --- a/cmd/bundle/variables.go +++ b/cmd/bundle/variables.go @@ -7,7 +7,7 @@ import ( ) func ConfigureBundleWithVariables(cmd *cobra.Command, args []string) error { - // Load bundle config and apply environment + // Load bundle config and apply target err := root.MustConfigureBundle(cmd, args) if err != nil { return err diff --git a/cmd/configure/configure.go b/cmd/configure/configure.go index c51fd8300..0c1e40521 100644 --- a/cmd/configure/configure.go +++ b/cmd/configure/configure.go @@ -131,7 +131,7 @@ func 
newConfigureCommand() *cobra.Command { // Include token flag for compatibility with the legacy CLI. // It doesn't actually do anything because we always use PATs. - cmd.Flags().BoolP("token", "t", true, "Configure using Databricks Personal Access Token") + cmd.Flags().Bool("token", true, "Configure using Databricks Personal Access Token") cmd.Flags().MarkHidden("token") cmd.RunE = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index f691bbfc2..e1c123365 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -11,11 +11,12 @@ import ( ) const envName = "DATABRICKS_BUNDLE_ENV" +const targetName = "DATABRICKS_BUNDLE_TARGET" -// getEnvironment returns the name of the environment to operate in. -func getEnvironment(cmd *cobra.Command) (value string) { +// getTarget returns the name of the target to operate in. +func getTarget(cmd *cobra.Command) (value string) { // The command line flag takes precedence. - flag := cmd.Flag("environment") + flag := cmd.Flag("target") if flag != nil { value = flag.Value.String() if value != "" { @@ -23,8 +24,23 @@ func getEnvironment(cmd *cobra.Command) (value string) { } } + oldFlag := cmd.Flag("environment") + if oldFlag != nil { + value = oldFlag.Value.String() + if value != "" { + return + } + } + // If it's not set, use the environment variable.
- return os.Getenv(envName) + target := os.Getenv(targetName) + // If target env is not set with a new variable, try to check for old variable name + // TODO: remove when environments section is not supported anymore + if target == "" { + target = os.Getenv(envName) + } + + return target } func getProfile(cmd *cobra.Command) (value string) { @@ -80,11 +96,11 @@ func configureBundle(cmd *cobra.Command, args []string, load func(ctx context.Co } var m bundle.Mutator - env := getEnvironment(cmd) + env := getTarget(cmd) if env == "" { - m = mutator.SelectDefaultEnvironment() + m = mutator.SelectDefaultTarget() } else { - m = mutator.SelectEnvironment(env) + m = mutator.SelectTarget(env) } ctx := cmd.Context() @@ -108,19 +124,27 @@ func TryConfigureBundle(cmd *cobra.Command, args []string) error { return configureBundle(cmd, args, bundle.TryLoad) } -// environmentCompletion executes to autocomplete the argument to the environment flag. -func environmentCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { +// targetCompletion executes to autocomplete the argument to the target flag. +func targetCompletion(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { b, err := loadBundle(cmd, args, bundle.MustLoad) if err != nil { cobra.CompErrorln(err.Error()) return nil, cobra.ShellCompDirectiveError } - return maps.Keys(b.Config.Environments), cobra.ShellCompDirectiveDefault + return maps.Keys(b.Config.Targets), cobra.ShellCompDirectiveDefault } +func initTargetFlag(cmd *cobra.Command) { + // To operate in the context of a bundle, all commands must take an "target" parameter. + cmd.PersistentFlags().StringP("target", "t", "", "bundle target to use (if applicable)") + cmd.RegisterFlagCompletionFunc("target", targetCompletion) +} + +// DEPRECATED flag func initEnvironmentFlag(cmd *cobra.Command) { // To operate in the context of a bundle, all commands must take an "environment" parameter. 
- cmd.PersistentFlags().StringP("environment", "e", "", "bundle environment to use (if applicable)") - cmd.RegisterFlagCompletionFunc("environment", environmentCompletion) + cmd.PersistentFlags().StringP("environment", "e", "", "bundle target to use (if applicable)") + cmd.PersistentFlags().MarkDeprecated("environment", "use --target flag instead") + cmd.RegisterFlagCompletionFunc("environment", targetCompletion) } diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 4382cf22f..8aff9018f 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -128,3 +128,27 @@ func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { b.WorkspaceClient() }) } + +func TestTargetFlagFull(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + cmd.SetArgs([]string{"version", "--target", "development"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, cmd.Flag("target").Value.String(), "development") +} + +func TestTargetFlagShort(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + cmd.SetArgs([]string{"version", "-t", "production"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, cmd.Flag("target").Value.String(), "production") +} diff --git a/cmd/root/root.go b/cmd/root/root.go index 48868b41f..c71cf9eac 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -36,6 +36,7 @@ func New() *cobra.Command { outputFlag := initOutputFlag(cmd) initProfileFlag(cmd) initEnvironmentFlag(cmd) + initTargetFlag(cmd) cmd.PersistentPreRunE = func(cmd *cobra.Command, args []string) error { ctx := cmd.Context() diff --git a/cmd/sync/sync_test.go b/cmd/sync/sync_test.go index a6eedbe6e..06e97540f 100644 --- a/cmd/sync/sync_test.go +++ b/cmd/sync/sync_test.go @@ -18,7 +18,7 @@ func TestSyncOptionsFromBundle(t *testing.T) { Path: tempDir, Bundle: config.Bundle{ - Environment: "default", + Target: "default", }, Workspace: 
config.Workspace{ From 042fbaa61414f818ed2a9cb3bf60afd49e348c10 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 17 Aug 2023 22:32:30 +0200 Subject: [PATCH 068/139] Rename init project-dir flag to output-dir (#676) ## Changes This PR: 1. Renames the project-dir flag to output-dir 2. Makes the project dir flag optional. When unspecified we default to the current working directory. ## Tests Manually --------- Co-authored-by: Pieter Noordhuis --- cmd/bundle/init.go | 9 ++++----- libs/template/materialize.go | 6 +++--- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index e3d76ecf2..14c575827 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -42,10 +42,9 @@ func newInitCommand() *cobra.Command { } var configFile string - var projectDir string + var outputDir string cmd.Flags().StringVar(&configFile, "config-file", "", "File containing input parameters for template initialization.") - cmd.Flags().StringVar(&projectDir, "project-dir", "", "The project will be initialized in this directory.") - cmd.MarkFlagRequired("project-dir") + cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to.") cmd.RunE = func(cmd *cobra.Command, args []string) error { templatePath := args[0] @@ -54,7 +53,7 @@ func newInitCommand() *cobra.Command { if !isRepoUrl(templatePath) { // skip downloading the repo because input arg is not a URL. 
We assume // it's a path on the local file system in that case - return template.Materialize(ctx, configFile, templatePath, projectDir) + return template.Materialize(ctx, configFile, templatePath, outputDir) } // Download the template in a temporary directory @@ -72,7 +71,7 @@ func newInitCommand() *cobra.Command { } defer os.RemoveAll(templateDir) - return template.Materialize(ctx, configFile, templateDir, projectDir) + return template.Materialize(ctx, configFile, templateDir, outputDir) } return cmd diff --git a/libs/template/materialize.go b/libs/template/materialize.go index bbc9e8da3..426646c33 100644 --- a/libs/template/materialize.go +++ b/libs/template/materialize.go @@ -16,8 +16,8 @@ const schemaFileName = "databricks_template_schema.json" // ctx: context containing a cmdio object. This is used to prompt the user // configFilePath: file path containing user defined config values // templateRoot: root of the template definition -// projectDir: root of directory where to initialize the project -func Materialize(ctx context.Context, configFilePath, templateRoot, projectDir string) error { +// outputDir: root of directory where to initialize the template +func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir string) error { templatePath := filepath.Join(templateRoot, templateDirName) libraryPath := filepath.Join(templateRoot, libraryDirName) schemaPath := filepath.Join(templateRoot, schemaFileName) @@ -48,7 +48,7 @@ func Materialize(ctx context.Context, configFilePath, templateRoot, projectDir s } // Walk and render the template, since input configuration is complete - r, err := newRenderer(ctx, config.values, templatePath, libraryPath, projectDir) + r, err := newRenderer(ctx, config.values, templatePath, libraryPath, outputDir) if err != nil { return err } From e3e9bc6def4e55603a316d0f155664d6bacdb11d Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 18 Aug 2023 10:07:25 +0200 Subject: [PATCH 069/139] Added support for 
sync.include and sync.exclude sections (#671) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes Added support for `sync.include` and `sync.exclude` sections ## Tests Added `sample-java` folder to gitignore ``` bundle: name: wheel-task sync: include: - "./sample-java/*.kts" ``` Kotlin files were correctly synced. ``` [DEBUG] Test execution command: /opt/homebrew/opt/go@1.21/bin/go test ./... -json -timeout 1h -coverpkg=./... -coverprofile=coverage.txt -run ^TestAcc [DEBUG] Test execution directory: /Users/andrew.nester/cli 2023/08/17 17:12:10 [INFO] ✅ TestAccAlertsCreateErrWhenNoArguments (2.320s) 2023/08/17 17:12:10 [INFO] ✅ TestAccApiGet (0.650s) 2023/08/17 17:12:12 [INFO] ✅ TestAccClustersList (1.060s) 2023/08/17 17:12:12 [INFO] ✅ TestAccClustersGet (0.760s) 2023/08/17 17:12:26 [INFO] ✅ TestAccFilerWorkspaceFilesReadWrite (13.270s) 2023/08/17 17:12:32 [INFO] ✅ TestAccFilerWorkspaceFilesReadDir (6.860s) 2023/08/17 17:12:46 [INFO] ✅ TestAccFilerDbfsReadWrite (13.380s) 2023/08/17 17:12:53 [INFO] ✅ TestAccFilerDbfsReadDir (7.460s) 2023/08/17 17:13:01 [INFO] ✅ TestAccFilerWorkspaceNotebookConflict (7.920s) 2023/08/17 17:13:10 [INFO] ✅ TestAccFilerWorkspaceNotebookWithOverwriteFlag (9.290s) 2023/08/17 17:13:10 [INFO] ✅ TestAccFilerLocalReadWrite (0.010s) 2023/08/17 17:13:11 [INFO] ✅ TestAccFilerLocalReadDir (0.010s) 2023/08/17 17:13:14 [INFO] ✅ TestAccFsCatForDbfs (3.180s) 2023/08/17 17:13:15 [INFO] ✅ TestAccFsCatForDbfsOnNonExistentFile (0.940s) 2023/08/17 17:13:15 [INFO] ✅ TestAccFsCatForDbfsInvalidScheme (0.560s) 2023/08/17 17:13:18 [INFO] ✅ TestAccFsCatDoesNotSupportOutputModeJson (2.910s) 2023/08/17 17:13:51 [INFO] ✅ TestAccFsCpDir (32.730s) 2023/08/17 17:14:06 [INFO] ✅ TestAccFsCpFileToFile (14.740s) 2023/08/17 17:14:20 [INFO] ✅ TestAccFsCpFileToDir (14.340s) 2023/08/17 17:14:53 [INFO] ✅ TestAccFsCpDirToDirFileNotOverwritten (32.710s) 2023/08/17 17:15:12 [INFO] ✅ TestAccFsCpFileToDirFileNotOverwritten (19.590s) 
2023/08/17 17:15:32 [INFO] ✅ TestAccFsCpFileToFileFileNotOverwritten (19.950s) 2023/08/17 17:16:11 [INFO] ✅ TestAccFsCpDirToDirWithOverwriteFlag (38.970s) 2023/08/17 17:16:32 [INFO] ✅ TestAccFsCpFileToFileWithOverwriteFlag (21.040s) 2023/08/17 17:16:52 [INFO] ✅ TestAccFsCpFileToDirWithOverwriteFlag (19.670s) 2023/08/17 17:16:54 [INFO] ✅ TestAccFsCpErrorsWhenSourceIsDirWithoutRecursiveFlag (1.890s) 2023/08/17 17:16:54 [INFO] ✅ TestAccFsCpErrorsOnInvalidScheme (0.690s) 2023/08/17 17:17:10 [INFO] ✅ TestAccFsCpSourceIsDirectoryButTargetIsFile (15.810s) 2023/08/17 17:17:14 [INFO] ✅ TestAccFsLsForDbfs (4.000s) 2023/08/17 17:17:18 [INFO] ✅ TestAccFsLsForDbfsWithAbsolutePaths (4.000s) 2023/08/17 17:17:21 [INFO] ✅ TestAccFsLsForDbfsOnFile (3.140s) 2023/08/17 17:17:23 [INFO] ✅ TestAccFsLsForDbfsOnEmptyDir (2.030s) 2023/08/17 17:17:24 [INFO] ✅ TestAccFsLsForDbfsForNonexistingDir (0.840s) 2023/08/17 17:17:25 [INFO] ✅ TestAccFsLsWithoutScheme (0.590s) 2023/08/17 17:17:27 [INFO] ✅ TestAccFsMkdirCreatesDirectory (2.310s) 2023/08/17 17:17:30 [INFO] ✅ TestAccFsMkdirCreatesMultipleDirectories (2.800s) 2023/08/17 17:17:33 [INFO] ✅ TestAccFsMkdirWhenDirectoryAlreadyExists (2.700s) 2023/08/17 17:17:35 [INFO] ✅ TestAccFsMkdirWhenFileExistsAtPath (2.870s) 2023/08/17 17:17:40 [INFO] ✅ TestAccFsRmForFile (4.030s) 2023/08/17 17:17:43 [INFO] ✅ TestAccFsRmForEmptyDirectory (3.470s) 2023/08/17 17:17:46 [INFO] ✅ TestAccFsRmForNonEmptyDirectory (3.350s) 2023/08/17 17:17:47 [INFO] ✅ TestAccFsRmForNonExistentFile (0.940s) 2023/08/17 17:17:51 [INFO] ✅ TestAccFsRmForNonEmptyDirectoryWithRecursiveFlag (3.570s) 2023/08/17 17:17:52 [INFO] ✅ TestAccGitClone (0.890s) 2023/08/17 17:17:52 [INFO] ✅ TestAccGitCloneWithOnlyRepoNameOnAlternateBranch (0.730s) 2023/08/17 17:17:53 [INFO] ✅ TestAccGitCloneErrorsWhenRepositoryDoesNotExist (0.540s) 2023/08/17 17:18:02 [INFO] ✅ TestAccLock (8.800s) 2023/08/17 17:18:06 [INFO] ✅ TestAccLockUnlockWithoutAllowsLockFileNotExist (3.930s) 2023/08/17 17:18:09 [INFO] ✅ 
TestAccLockUnlockWithAllowsLockFileNotExist (3.320s) 2023/08/17 17:18:20 [INFO] ✅ TestAccSyncFullFileSync (10.570s) 2023/08/17 17:18:31 [INFO] ✅ TestAccSyncIncrementalFileSync (11.460s) 2023/08/17 17:18:42 [INFO] ✅ TestAccSyncNestedFolderSync (10.850s) 2023/08/17 17:18:53 [INFO] ✅ TestAccSyncNestedFolderDoesntFailOnNonEmptyDirectory (10.650s) 2023/08/17 17:19:04 [INFO] ✅ TestAccSyncNestedSpacePlusAndHashAreEscapedSync (10.930s) 2023/08/17 17:19:11 [INFO] ✅ TestAccSyncIncrementalFileOverwritesFolder (7.010s) 2023/08/17 17:19:18 [INFO] ✅ TestAccSyncIncrementalSyncPythonNotebookToFile (7.380s) 2023/08/17 17:19:24 [INFO] ✅ TestAccSyncIncrementalSyncFileToPythonNotebook (6.220s) 2023/08/17 17:19:30 [INFO] ✅ TestAccSyncIncrementalSyncPythonNotebookDelete (5.530s) 2023/08/17 17:19:32 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableIfRepoDoesntExist (2.620s) 2023/08/17 17:19:38 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableIfRepoExists (5.460s) 2023/08/17 17:19:40 [INFO] ✅ TestAccSyncEnsureRemotePathIsUsableInWorkspace (1.850s) 2023/08/17 17:19:40 [INFO] ✅ TestAccWorkspaceList (0.780s) 2023/08/17 17:19:51 [INFO] ✅ TestAccExportDir (10.350s) 2023/08/17 17:19:54 [INFO] ✅ TestAccExportDirDoesNotOverwrite (3.330s) 2023/08/17 17:19:58 [INFO] ✅ TestAccExportDirWithOverwriteFlag (3.770s) 2023/08/17 17:20:07 [INFO] ✅ TestAccImportDir (9.320s) 2023/08/17 17:20:24 [INFO] ✅ TestAccImportDirDoesNotOverwrite (16.950s) 2023/08/17 17:20:35 [INFO] ✅ TestAccImportDirWithOverwriteFlag (10.620s) 2023/08/17 17:20:35 [INFO] ✅ 68/68 passed, 0 failed, 3 skipped ``` --- bundle/bundle.go | 34 +++++++++++ bundle/config/root.go | 3 + bundle/config/sync.go | 13 ++++ bundle/deploy/files/sync.go | 12 +++- cmd/bundle/sync.go | 7 +++ cmd/sync/sync.go | 7 +++ libs/fileset/glob.go | 49 +++++++++++++++ libs/fileset/glob_test.go | 65 ++++++++++++++++++++ libs/set/set.go | 75 +++++++++++++++++++++++ libs/set/set_test.go | 111 +++++++++++++++++++++++++++++++++ libs/sync/sync.go | 72 ++++++++++++++++++---- 
libs/sync/sync_test.go | 119 ++++++++++++++++++++++++++++++++++++ 12 files changed, 554 insertions(+), 13 deletions(-) create mode 100644 bundle/config/sync.go create mode 100644 libs/fileset/glob.go create mode 100644 libs/fileset/glob_test.go create mode 100644 libs/set/set.go create mode 100644 libs/set/set_test.go create mode 100644 libs/sync/sync_test.go diff --git a/bundle/bundle.go b/bundle/bundle.go index a5eaa2897..d69d58158 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -24,6 +24,8 @@ import ( "github.com/hashicorp/terraform-exec/tfexec" ) +const internalFolder = ".internal" + type Bundle struct { Config config.Root @@ -155,6 +157,38 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) { return dir, nil } +// This directory is used to store and automaticaly sync internal bundle files, such as, f.e +// notebook trampoline files for Python wheel and etc. +func (b *Bundle) InternalDir() (string, error) { + cacheDir, err := b.CacheDir() + if err != nil { + return "", err + } + + dir := filepath.Join(cacheDir, internalFolder) + err = os.MkdirAll(dir, 0700) + if err != nil { + return dir, err + } + + return dir, nil +} + +// GetSyncIncludePatterns returns a list of user defined includes +// And also adds InternalDir folder to include list for sync command +// so this folder is always synced +func (b *Bundle) GetSyncIncludePatterns() ([]string, error) { + internalDir, err := b.InternalDir() + if err != nil { + return nil, err + } + internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + if err != nil { + return nil, err + } + return append(b.Config.Sync.Include, filepath.ToSlash(filepath.Join(internalDirRel, "*.*"))), nil +} + func (b *Bundle) GitRepository() (*git.Repository, error) { rootPath, err := folders.FindDirWithLeaf(b.Config.Path, ".git") if err != nil { diff --git a/bundle/config/root.go b/bundle/config/root.go index 24426dd89..e0d20425b 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -77,6 +77,9 @@ type 
Root struct { // DEPRECATED. Left for backward compatibility with Targets Environments map[string]*Target `json:"environments,omitempty"` + + // Sync section specifies options for files synchronization + Sync Sync `json:"sync"` } func Load(path string) (*Root, error) { diff --git a/bundle/config/sync.go b/bundle/config/sync.go new file mode 100644 index 000000000..0580e4c4f --- /dev/null +++ b/bundle/config/sync.go @@ -0,0 +1,13 @@ +package config + +type Sync struct { + // Include contains a list of globs evaluated relative to the bundle root path + // to explicitly include files that were excluded by the user's gitignore. + Include []string `json:"include,omitempty"` + + // Exclude contains a list of globs evaluated relative to the bundle root path + // to explicitly exclude files that were included by + // 1) the default that observes the user's gitignore, or + // 2) the `Include` field above. + Exclude []string `json:"exclude,omitempty"` +} diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index 84d79dc81..2dccd20a7 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -14,9 +14,17 @@ func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns() + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ - LocalPath: b.Config.Path, - RemotePath: b.Config.Workspace.FilesPath, + LocalPath: b.Config.Path, + RemotePath: b.Config.Workspace.FilesPath, + Include: includes, + Exclude: b.Config.Sync.Exclude, + Full: false, CurrentUser: b.Config.Workspace.CurrentUser.User, diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index 2fff7baf5..be45626a3 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -23,9 +23,16 @@ func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) return nil, fmt.Errorf("cannot get 
bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns() + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, + Include: includes, + Exclude: b.Config.Sync.Exclude, Full: f.full, PollInterval: f.interval, diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index d2aad0c3f..4a62123ba 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -35,9 +35,16 @@ func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, args []string, b * return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } + includes, err := b.GetSyncIncludePatterns() + if err != nil { + return nil, fmt.Errorf("cannot get list of sync includes: %w", err) + } + opts := sync.SyncOptions{ LocalPath: b.Config.Path, RemotePath: b.Config.Workspace.FilesPath, + Include: includes, + Exclude: b.Config.Sync.Exclude, Full: f.full, PollInterval: f.interval, diff --git a/libs/fileset/glob.go b/libs/fileset/glob.go new file mode 100644 index 000000000..7a9f130bd --- /dev/null +++ b/libs/fileset/glob.go @@ -0,0 +1,49 @@ +package fileset + +import ( + "io/fs" + "os" + "path/filepath" +) + +type GlobSet struct { + root string + patterns []string +} + +func NewGlobSet(root string, includes []string) (*GlobSet, error) { + absRoot, err := filepath.Abs(root) + if err != nil { + return nil, err + } + for k := range includes { + includes[k] = filepath.Join(absRoot, filepath.FromSlash(includes[k])) + } + return &GlobSet{absRoot, includes}, nil +} + +// Return all files which matches defined glob patterns +func (s *GlobSet) All() ([]File, error) { + files := make([]File, 0) + for _, pattern := range s.patterns { + matches, err := filepath.Glob(pattern) + if err != nil { + return files, err + } + + for _, match := range matches { + matchRel, err := filepath.Rel(s.root, match) + if err != nil { + return files, err + } + + stat, err := os.Stat(match) + if err 
!= nil { + return files, err + } + files = append(files, File{fs.FileInfoToDirEntry(stat), match, matchRel}) + } + } + + return files, nil +} diff --git a/libs/fileset/glob_test.go b/libs/fileset/glob_test.go new file mode 100644 index 000000000..f6ac7e192 --- /dev/null +++ b/libs/fileset/glob_test.go @@ -0,0 +1,65 @@ +package fileset + +import ( + "io/fs" + "os" + "path/filepath" + "slices" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestGlobFileset(t *testing.T) { + cwd, err := os.Getwd() + require.NoError(t, err) + root := filepath.Join(cwd, "..", "filer") + + entries, err := os.ReadDir(root) + require.NoError(t, err) + + g, err := NewGlobSet(root, []string{ + "./*.go", + }) + require.NoError(t, err) + + files, err := g.All() + require.NoError(t, err) + + require.Equal(t, len(files), len(entries)) + for _, f := range files { + exists := slices.ContainsFunc(entries, func(de fs.DirEntry) bool { + return de.Name() == f.Name() + }) + require.True(t, exists) + } + + g, err = NewGlobSet(root, []string{ + "./*.js", + }) + require.NoError(t, err) + + files, err = g.All() + require.NoError(t, err) + require.Equal(t, len(files), 0) +} + +func TestGlobFilesetWithRelativeRoot(t *testing.T) { + root := filepath.Join("..", "filer") + + entries, err := os.ReadDir(root) + require.NoError(t, err) + + g, err := NewGlobSet(root, []string{ + "./*.go", + }) + require.NoError(t, err) + + files, err := g.All() + require.NoError(t, err) + + require.Equal(t, len(files), len(entries)) + for _, f := range files { + require.True(t, filepath.IsAbs(f.Absolute)) + } +} diff --git a/libs/set/set.go b/libs/set/set.go new file mode 100644 index 000000000..4798ed092 --- /dev/null +++ b/libs/set/set.go @@ -0,0 +1,75 @@ +package set + +import ( + "fmt" + + "golang.org/x/exp/maps" +) + +type hashFunc[T any] func(a T) string + +// Set struct represents set data structure +type Set[T any] struct { + key hashFunc[T] + data map[string]T +} + +// NewSetFromF initialise a new set with 
initial values and a hash function +// to define uniqueness of value +func NewSetFromF[T any](values []T, f hashFunc[T]) *Set[T] { + s := &Set[T]{ + key: f, + data: make(map[string]T), + } + + for _, v := range values { + s.Add(v) + } + + return s +} + +// NewSetF initialise a new empty and a hash function +// to define uniqueness of value +func NewSetF[T any](f hashFunc[T]) *Set[T] { + return NewSetFromF([]T{}, f) +} + +// NewSetFrom initialise a new set with initial values which are comparable +func NewSetFrom[T comparable](values []T) *Set[T] { + return NewSetFromF(values, func(item T) string { + return fmt.Sprintf("%v", item) + }) +} + +// NewSetFrom initialise a new empty set for comparable values +func NewSet[T comparable]() *Set[T] { + return NewSetFrom([]T{}) +} + +func (s *Set[T]) addOne(item T) { + s.data[s.key(item)] = item +} + +// Add one or multiple items to set +func (s *Set[T]) Add(items ...T) { + for _, i := range items { + s.addOne(i) + } +} + +// Remove an item from set. 
No-op if the item does not exist +func (s *Set[T]) Remove(item T) { + delete(s.data, s.key(item)) +} + +// Indicates if the item exists in the set +func (s *Set[T]) Has(item T) bool { + _, ok := s.data[s.key(item)] + return ok +} + +// Returns an iterable slice of values from set +func (s *Set[T]) Iter() []T { + return maps.Values(s.data) +} diff --git a/libs/set/set_test.go b/libs/set/set_test.go new file mode 100644 index 000000000..c2b6e25c8 --- /dev/null +++ b/libs/set/set_test.go @@ -0,0 +1,111 @@ +package set + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestSet(t *testing.T) { + s := NewSetFrom([]string{}) + require.ElementsMatch(t, []string{}, s.Iter()) + + s = NewSetFrom([]string{"a", "a", "a", "b", "b", "c", "d", "e"}) + require.ElementsMatch(t, []string{"a", "b", "c", "d", "e"}, s.Iter()) + + i := NewSetFrom([]int{1, 1, 2, 3, 4, 5, 7, 7, 7, 10, 11}) + require.ElementsMatch(t, []int{1, 2, 3, 4, 5, 7, 10, 11}, i.Iter()) + + f := NewSetFrom([]float32{1.1, 1.1, 2.0, 3.1, 4.5, 5.1, 7.1, 7.2, 7.1, 10.1, 11.0}) + require.ElementsMatch(t, []float32{1.1, 2.0, 3.1, 4.5, 5.1, 7.1, 7.2, 10.1, 11.0}, f.Iter()) +} + +type testStruct struct { + key string + value int +} + +func TestSetCustomKey(t *testing.T) { + s := NewSetF(func(item *testStruct) string { + return fmt.Sprintf("%s:%d", item.key, item.value) + }) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"b", 2}) + s.Add(&testStruct{"c", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"a", 1}) + s.Add(&testStruct{"c", 1}) + s.Add(&testStruct{"c", 3}) + + require.ElementsMatch(t, []*testStruct{ + {"a", 1}, + {"b", 2}, + {"c", 1}, + {"c", 3}, + }, s.Iter()) +} + +func TestSetAdd(t *testing.T) { + s := NewSet[string]() + s.Add("a") + s.Add("a") + s.Add("a") + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + require.ElementsMatch(t, []string{"a", "b", "c", "d"}, s.Iter()) +} + +func TestSetRemove(t *testing.T) { + s := 
NewSet[string]() + s.Add("a") + s.Add("a") + s.Add("a") + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + s.Remove("d") + s.Remove("d") + s.Remove("a") + + require.ElementsMatch(t, []string{"b", "c"}, s.Iter()) +} + +func TestSetHas(t *testing.T) { + s := NewSet[string]() + require.False(t, s.Has("a")) + + s.Add("a") + require.True(t, s.Has("a")) + + s.Add("a") + s.Add("a") + require.True(t, s.Has("a")) + + s.Add("b") + s.Add("c") + s.Add("c") + s.Add("d") + s.Add("d") + + require.True(t, s.Has("a")) + require.True(t, s.Has("b")) + require.True(t, s.Has("c")) + require.True(t, s.Has("d")) + + s.Remove("d") + s.Remove("a") + + require.False(t, s.Has("a")) + require.True(t, s.Has("b")) + require.True(t, s.Has("c")) + require.False(t, s.Has("d")) +} diff --git a/libs/sync/sync.go b/libs/sync/sync.go index a299214d0..8be478fc3 100644 --- a/libs/sync/sync.go +++ b/libs/sync/sync.go @@ -6,8 +6,10 @@ import ( "time" "github.com/databricks/cli/libs/filer" + "github.com/databricks/cli/libs/fileset" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/log" + "github.com/databricks/cli/libs/set" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/service/iam" ) @@ -15,6 +17,8 @@ import ( type SyncOptions struct { LocalPath string RemotePath string + Include []string + Exclude []string Full bool @@ -32,7 +36,10 @@ type SyncOptions struct { type Sync struct { *SyncOptions - fileSet *git.FileSet + fileSet *git.FileSet + includeFileSet *fileset.GlobSet + excludeFileSet *fileset.GlobSet + snapshot *Snapshot filer filer.Filer @@ -52,6 +59,16 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { return nil, err } + includeFileSet, err := fileset.NewGlobSet(opts.LocalPath, opts.Include) + if err != nil { + return nil, err + } + + excludeFileSet, err := fileset.NewGlobSet(opts.LocalPath, opts.Exclude) + if err != nil { + return nil, err + } + // Verify that the remote path we're about to synchronize to is 
valid and allowed. err = EnsureRemotePathIsUsable(ctx, opts.WorkspaceClient, opts.RemotePath, opts.CurrentUser) if err != nil { @@ -88,11 +105,13 @@ func New(ctx context.Context, opts SyncOptions) (*Sync, error) { return &Sync{ SyncOptions: &opts, - fileSet: fileSet, - snapshot: snapshot, - filer: filer, - notifier: &NopNotifier{}, - seq: 0, + fileSet: fileSet, + includeFileSet: includeFileSet, + excludeFileSet: excludeFileSet, + snapshot: snapshot, + filer: filer, + notifier: &NopNotifier{}, + seq: 0, }, nil } @@ -132,15 +151,12 @@ func (s *Sync) notifyComplete(ctx context.Context, d diff) { } func (s *Sync) RunOnce(ctx context.Context) error { - // tradeoff: doing portable monitoring only due to macOS max descriptor manual ulimit setting requirement - // https://github.com/gorakhargosh/watchdog/blob/master/src/watchdog/observers/kqueue.py#L394-L418 - all, err := s.fileSet.All() + files, err := getFileList(ctx, s) if err != nil { - log.Errorf(ctx, "cannot list files: %s", err) return err } - change, err := s.snapshot.diff(ctx, all) + change, err := s.snapshot.diff(ctx, files) if err != nil { return err } @@ -166,6 +182,40 @@ func (s *Sync) RunOnce(ctx context.Context) error { return nil } +func getFileList(ctx context.Context, s *Sync) ([]fileset.File, error) { + // tradeoff: doing portable monitoring only due to macOS max descriptor manual ulimit setting requirement + // https://github.com/gorakhargosh/watchdog/blob/master/src/watchdog/observers/kqueue.py#L394-L418 + all := set.NewSetF(func(f fileset.File) string { + return f.Absolute + }) + gitFiles, err := s.fileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list files: %s", err) + return nil, err + } + all.Add(gitFiles...) + + include, err := s.includeFileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list include files: %s", err) + return nil, err + } + + all.Add(include...) 
+ + exclude, err := s.excludeFileSet.All() + if err != nil { + log.Errorf(ctx, "cannot list exclude files: %s", err) + return nil, err + } + + for _, f := range exclude { + all.Remove(f) + } + + return all.Iter(), nil +} + func (s *Sync) DestroySnapshot(ctx context.Context) error { return s.snapshot.Destroy(ctx) } diff --git a/libs/sync/sync_test.go b/libs/sync/sync_test.go new file mode 100644 index 000000000..99c7e04b1 --- /dev/null +++ b/libs/sync/sync_test.go @@ -0,0 +1,119 @@ +package sync + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/libs/fileset" + "github.com/databricks/cli/libs/git" + "github.com/stretchr/testify/require" +) + +func createFile(dir string, name string) error { + f, err := os.Create(filepath.Join(dir, name)) + if err != nil { + return err + } + + return f.Close() +} + +func setupFiles(t *testing.T) string { + dir := t.TempDir() + + err := createFile(dir, "a.go") + require.NoError(t, err) + + err = createFile(dir, "b.go") + require.NoError(t, err) + + err = createFile(dir, "ab.go") + require.NoError(t, err) + + err = createFile(dir, "abc.go") + require.NoError(t, err) + + err = createFile(dir, "c.go") + require.NoError(t, err) + + err = createFile(dir, "d.go") + require.NoError(t, err) + + dbDir := filepath.Join(dir, ".databricks") + err = os.Mkdir(dbDir, 0755) + require.NoError(t, err) + + err = createFile(dbDir, "e.go") + require.NoError(t, err) + + return dir + +} + +func TestGetFileSet(t *testing.T) { + ctx := context.Background() + + dir := setupFiles(t) + fileSet, err := git.NewFileSet(dir) + require.NoError(t, err) + + err = fileSet.EnsureValidGitIgnoreExists() + require.NoError(t, err) + + inc, err := fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + excl, err := fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + s := &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err := 
getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 7) + + inc, err = fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + excl, err = fileset.NewGlobSet(dir, []string{"*.go"}) + require.NoError(t, err) + + s = &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err = getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 1) + + inc, err = fileset.NewGlobSet(dir, []string{".databricks/*.*"}) + require.NoError(t, err) + + excl, err = fileset.NewGlobSet(dir, []string{}) + require.NoError(t, err) + + s = &Sync{ + SyncOptions: &SyncOptions{}, + + fileSet: fileSet, + includeFileSet: inc, + excludeFileSet: excl, + } + + fileList, err = getFileList(ctx, s) + require.NoError(t, err) + require.Equal(t, len(fileList), 8) + +} From ffc78b4b8b7cab8999472ae90e78f8a5db812abd Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 18 Aug 2023 11:29:48 +0200 Subject: [PATCH 070/139] Add template directory flag for bundle templates (#675) ## Changes This flag allows users to initialize a template from a subdirectory in the repo root. Also enables multi template repositories. 
## Tests Manually --- cmd/bundle/init.go | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index 14c575827..9ba7e190e 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -43,7 +43,9 @@ func newInitCommand() *cobra.Command { var configFile string var outputDir string + var templateDir string cmd.Flags().StringVar(&configFile, "config-file", "", "File containing input parameters for template initialization.") + cmd.Flags().StringVar(&templateDir, "template-dir", "", "Directory within repository that holds the template specification.") cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to.") cmd.RunE = func(cmd *cobra.Command, args []string) error { @@ -59,19 +61,18 @@ func newInitCommand() *cobra.Command { // Download the template in a temporary directory tmpDir := os.TempDir() templateURL := templatePath - templateDir := filepath.Join(tmpDir, repoName(templateURL)) - err := os.MkdirAll(templateDir, 0755) + repoDir := filepath.Join(tmpDir, repoName(templateURL)) + err := os.MkdirAll(repoDir, 0755) if err != nil { return err } // TODO: Add automated test that the downloaded git repo is cleaned up. - err = git.Clone(ctx, templateURL, "", templateDir) + err = git.Clone(ctx, templateURL, "", repoDir) if err != nil { return err } defer os.RemoveAll(templateDir) - - return template.Materialize(ctx, configFile, templateDir, outputDir) + return template.Materialize(ctx, configFile, filepath.Join(repoDir, templateDir), outputDir) } return cmd From c25bc041b113802cf55c56a5c428445947d8f01f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 21 Aug 2023 09:35:02 +0200 Subject: [PATCH 071/139] Never ignore root directory when enumerating files in a repository (#683) ## Changes The pattern `.*` in a `.gitignore` file can match `.` when walking all files in a repository. If it does, then the walker immediately aborts and no files are returned. 
The root directory (an unnamed directory) must never be ignored. Reported in https://github.com/databricks/databricks-vscode/issues/837. ## Tests New tests pass. --- libs/fileset/fileset.go | 6 +++--- libs/git/fileset.go | 7 +------ libs/git/fileset_test.go | 31 ++++++++++++++++++------------- libs/git/repository.go | 5 +++++ libs/git/testdata/.gitignore | 3 +++ libs/git/view_test.go | 7 +++++++ 6 files changed, 37 insertions(+), 22 deletions(-) diff --git a/libs/fileset/fileset.go b/libs/fileset/fileset.go index 07494fe83..81b85525c 100644 --- a/libs/fileset/fileset.go +++ b/libs/fileset/fileset.go @@ -39,14 +39,14 @@ func (w *FileSet) Root() string { // Return all tracked files for Repo func (w *FileSet) All() ([]File, error) { - return w.RecursiveListFiles(w.root) + return w.recursiveListFiles() } // Recursively traverses dir in a depth first manner and returns a list of all files // that are being tracked in the FileSet (ie not being ignored for matching one of the // patterns in w.ignore) -func (w *FileSet) RecursiveListFiles(dir string) (fileList []File, err error) { - err = filepath.WalkDir(dir, func(path string, d fs.DirEntry, err error) error { +func (w *FileSet) recursiveListFiles() (fileList []File, err error) { + err = filepath.WalkDir(w.root, func(path string, d fs.DirEntry, err error) error { if err != nil { return err } diff --git a/libs/git/fileset.go b/libs/git/fileset.go index 885a19b99..c604ac7fa 100644 --- a/libs/git/fileset.go +++ b/libs/git/fileset.go @@ -6,7 +6,7 @@ import ( // FileSet is Git repository aware implementation of [fileset.FileSet]. // It forces checking if gitignore files have been modified every -// time a call to [FileSet.All] or [FileSet.RecursiveListFiles] is made. +// time a call to [FileSet.All] is made. 
type FileSet struct { fileset *fileset.FileSet view *View @@ -43,11 +43,6 @@ func (f *FileSet) All() ([]fileset.File, error) { return f.fileset.All() } -func (f *FileSet) RecursiveListFiles(dir string) ([]fileset.File, error) { - f.view.repo.taintIgnoreRules() - return f.fileset.RecursiveListFiles(dir) -} - func (f *FileSet) EnsureValidGitIgnoreExists() error { return f.view.EnsureValidGitIgnoreExists() } diff --git a/libs/git/fileset_test.go b/libs/git/fileset_test.go index 4fa2ca4b2..74133f525 100644 --- a/libs/git/fileset_test.go +++ b/libs/git/fileset_test.go @@ -10,18 +10,23 @@ import ( "github.com/stretchr/testify/require" ) -func TestFileSetRecursiveListFiles(t *testing.T) { - fileSet, err := NewFileSet("./testdata") +func testFileSetAll(t *testing.T, path string) { + fileSet, err := NewFileSet(path) require.NoError(t, err) - files, err := fileSet.RecursiveListFiles("./testdata") + files, err := fileSet.All() require.NoError(t, err) - require.Len(t, files, 6) - assert.Equal(t, filepath.Join(".gitignore"), files[0].Relative) - assert.Equal(t, filepath.Join("a", ".gitignore"), files[1].Relative) - assert.Equal(t, filepath.Join("a", "b", ".gitignore"), files[2].Relative) - assert.Equal(t, filepath.Join("a", "b", "world.txt"), files[3].Relative) - assert.Equal(t, filepath.Join("a", "hello.txt"), files[4].Relative) - assert.Equal(t, filepath.Join("databricks.yml"), files[5].Relative) + require.Len(t, files, 3) + assert.Equal(t, filepath.Join("a", "b", "world.txt"), files[0].Relative) + assert.Equal(t, filepath.Join("a", "hello.txt"), files[1].Relative) + assert.Equal(t, filepath.Join("databricks.yml"), files[2].Relative) +} + +func TestFileSetListAllInRepo(t *testing.T) { + testFileSetAll(t, "./testdata") +} + +func TestFileSetListAllInTempDir(t *testing.T) { + testFileSetAll(t, copyTestdata(t, "./testdata")) } func TestFileSetNonCleanRoot(t *testing.T) { @@ -32,10 +37,10 @@ func TestFileSetNonCleanRoot(t *testing.T) { require.NoError(t, err) files, err := 
fileSet.All() require.NoError(t, err) - assert.Len(t, files, 6) + assert.Len(t, files, 3) } -func TestFilesetAddsCacheDirToGitIgnore(t *testing.T) { +func TestFileSetAddsCacheDirToGitIgnore(t *testing.T) { projectDir := t.TempDir() fileSet, err := NewFileSet(projectDir) require.NoError(t, err) @@ -48,7 +53,7 @@ func TestFilesetAddsCacheDirToGitIgnore(t *testing.T) { assert.Contains(t, string(fileBytes), ".databricks") } -func TestFilesetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) { +func TestFileSetDoesNotCacheDirToGitIgnoreIfAlreadyPresent(t *testing.T) { projectDir := t.TempDir() gitIgnorePath := filepath.Join(projectDir, ".gitignore") diff --git a/libs/git/repository.go b/libs/git/repository.go index 2f19cff98..9c847687d 100644 --- a/libs/git/repository.go +++ b/libs/git/repository.go @@ -160,6 +160,11 @@ func (r *Repository) Ignore(relPath string) (bool, error) { trailingSlash = "/" } + // Never ignore the root path (an unnamed path) + if len(parts) == 1 && parts[0] == "." { + return false, nil + } + // Walk over path prefixes to check applicable gitignore files. for i := range parts { prefix := path.Clean(strings.Join(parts[:i], "/")) diff --git a/libs/git/testdata/.gitignore b/libs/git/testdata/.gitignore index 3d68fc1c7..5bfc9c1e7 100644 --- a/libs/git/testdata/.gitignore +++ b/libs/git/testdata/.gitignore @@ -7,3 +7,6 @@ root.* # Directory pattern. ignoredirectory/ + +# Ignore dotfiles +.* diff --git a/libs/git/view_test.go b/libs/git/view_test.go index 795e7b6e4..3ecd301b5 100644 --- a/libs/git/view_test.go +++ b/libs/git/view_test.go @@ -68,8 +68,15 @@ func testViewAtRoot(t *testing.T, tv testView) { assert.True(t, tv.Ignore("root/foo")) assert.True(t, tv.Ignore("root_double")) assert.False(t, tv.Ignore("newfile")) + assert.True(t, tv.Ignore(".gitignore")) + assert.False(t, tv.Ignore("newfile.py")) assert.True(t, tv.Ignore("ignoredirectory/")) + // Never ignore the root directory. 
+ // This is the only path that may be checked as `.`, + // and would match the `.*` ignore pattern if specified. + assert.False(t, tv.Ignore(".")) + // Nested .gitignores should not affect root. assert.False(t, tv.Ignore("a.sh")) From e1ca24d5d5416e974de17da4045e25b6a3235418 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Mon, 21 Aug 2023 11:09:08 +0200 Subject: [PATCH 072/139] Improve 'mode' error message (#681) ## Changes `unsupported value specified for 'mode': %s` was not a helpful error message. --- bundle/config/mutator/process_target_mode.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index b5dc25598..fca4e4b03 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -182,7 +182,7 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { case "": // No action default: - return fmt.Errorf("unsupported value specified for 'mode': %s", b.Config.Bundle.Mode) + return fmt.Errorf("unsupported value '%s' specified for 'mode': must be either 'development' or 'production'", b.Config.Bundle.Mode) } return nil From 5ed635a24091872d00d2a20ee9cefcf829a556ac Mon Sep 17 00:00:00 2001 From: Serge Smertin <259697+nfx@users.noreply.github.com> Date: Mon, 21 Aug 2023 18:17:02 +0200 Subject: [PATCH 073/139] Added `databricks account o-auth-enrollment enable` command (#687) This command takes the user through the interactive flow to set up OAuth for a fresh account, where only Basic authentication works. 
--------- Co-authored-by: Andrew Nester --- cmd/account/o-auth-enrollment/overrides.go | 107 +++++++++++++++++++++ libs/cmdio/io.go | 29 ++++++ 2 files changed, 136 insertions(+) create mode 100644 cmd/account/o-auth-enrollment/overrides.go diff --git a/cmd/account/o-auth-enrollment/overrides.go b/cmd/account/o-auth-enrollment/overrides.go new file mode 100644 index 000000000..1fc3aacc1 --- /dev/null +++ b/cmd/account/o-auth-enrollment/overrides.go @@ -0,0 +1,107 @@ +package o_auth_enrollment + +import ( + "context" + "fmt" + "time" + + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/retries" + "github.com/databricks/databricks-sdk-go/service/oauth2" + "github.com/spf13/cobra" +) + +func promptForBasicAccountConfig(ctx context.Context) (*databricks.Config, error) { + if !cmdio.IsInTTY(ctx) { + return nil, fmt.Errorf("this command requires a TTY") + } + // OAuth Enrollment only works on AWS + host, err := cmdio.DefaultPrompt(ctx, "Host", "https://accounts.cloud.databricks.com") + if err != nil { + return nil, fmt.Errorf("host: %w", err) + } + accountID, err := cmdio.SimplePrompt(ctx, "Account ID") + if err != nil { + return nil, fmt.Errorf("account: %w", err) + } + username, err := cmdio.SimplePrompt(ctx, "Username") + if err != nil { + return nil, fmt.Errorf("username: %w", err) + } + password, err := cmdio.Secret(ctx, "Password") + if err != nil { + return nil, fmt.Errorf("password: %w", err) + } + return &databricks.Config{ + Host: host, + AccountID: accountID, + Username: username, + Password: password, + }, nil +} + +func enableOAuthForAccount(ctx context.Context, cfg *databricks.Config) error { + ac, err := databricks.NewAccountClient(cfg) + if err != nil { + return fmt.Errorf("failed to instantiate account client: %w", err) + } + // The enrollment is executed asynchronously, so the API returns HTTP 204 immediately + err = ac.OAuthEnrollment.Create(ctx, oauth2.CreateOAuthEnrollment{ 
+ EnableAllPublishedApps: true, + }) + if err != nil { + return fmt.Errorf("failed to create oauth enrollment: %w", err) + } + enableSpinner := cmdio.Spinner(ctx) + // The actual enrollment take a few minutes + err = retries.Wait(ctx, 10*time.Minute, func() *retries.Err { + status, err := ac.OAuthEnrollment.Get(ctx) + if err != nil { + return retries.Halt(err) + } + if !status.IsEnabled { + msg := "Enabling OAuth..." + enableSpinner <- msg + return retries.Continues(msg) + } + enableSpinner <- "OAuth is enabled" + close(enableSpinner) + return nil + }) + if err != nil { + return fmt.Errorf("wait for enrollment: %w", err) + } + // enable Databricks CLI, so that `databricks auth login` works + _, err = ac.PublishedAppIntegration.Create(ctx, oauth2.CreatePublishedAppIntegration{ + AppId: "databricks-cli", + }) + if err != nil { + return fmt.Errorf("failed to enable databricks CLI: %w", err) + } + return nil +} + +func newEnable() *cobra.Command { + return &cobra.Command{ + Use: "enable", + Short: "Enable Databricks CLI, Tableau Desktop, and PowerBI for this account.", + Long: `Before you can do 'databricks auth login', you have to enable OAuth for this account. 
+ +This command prompts you for Account ID, username, and password and waits until OAuth is enabled.`, + RunE: func(cmd *cobra.Command, args []string) error { + ctx := cmd.Context() + cfg, err := promptForBasicAccountConfig(ctx) + if err != nil { + return fmt.Errorf("account config: %w", err) + } + return enableOAuthForAccount(ctx, cfg) + }, + } +} + +func init() { + cmdOverrides = append(cmdOverrides, func(c *cobra.Command) { + c.AddCommand(newEnable()) + }) +} diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index bc5a5f302..9d712e351 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -205,6 +205,35 @@ func Prompt(ctx context.Context) *promptui.Prompt { } } +func (c *cmdIO) simplePrompt(label string) *promptui.Prompt { + return &promptui.Prompt{ + Label: label, + Stdin: io.NopCloser(c.in), + Stdout: nopWriteCloser{c.out}, + } +} + +func (c *cmdIO) SimplePrompt(label string) (value string, err error) { + return c.simplePrompt(label).Run() +} + +func SimplePrompt(ctx context.Context, label string) (value string, err error) { + c := fromContext(ctx) + return c.SimplePrompt(label) +} + +func (c *cmdIO) DefaultPrompt(label, defaultValue string) (value string, err error) { + prompt := c.simplePrompt(label) + prompt.Default = defaultValue + prompt.AllowEdit = true + return prompt.Run() +} + +func DefaultPrompt(ctx context.Context, label, defaultValue string) (value string, err error) { + c := fromContext(ctx) + return c.DefaultPrompt(label, defaultValue) +} + func (c *cmdIO) Spinner(ctx context.Context) chan string { var sp *spinner.Spinner if c.interactive { From 4ee926b8858bf1583fcd8bbe9a5222b1594e72ec Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 23 Aug 2023 18:47:07 +0200 Subject: [PATCH 074/139] Added run_as section for bundle configuration (#692) ## Changes Added run_as section for bundle configuration. This section allows to define an user name or service principal which will be applied as an execution identity for jobs and DLT pipelines. 
In the case of DLT, identity defined in `run_as` will be assigned `IS_OWNER` permission on this pipeline. ## Tests Added unit tests for configuration. Also ran deploy for the following bundle configuration ``` bundle: name: "run_as" run_as: # service_principal_name: "f7263fcc-56d0-4981-8baf-c2a45296690b" user_name: "lennart.kats@databricks.com" resources: pipelines: andrew_pipeline: name: "Andrew Nester pipeline" libraries: - notebook: path: ./test.py jobs: job_one: name: Job One tasks: - task_key: "task" new_cluster: num_workers: 1 spark_version: 13.2.x-snapshot-scala2.12 node_type_id: i3.xlarge runtime_engine: PHOTON notebook_task: notebook_path: "./test.py" ``` --- bundle/config/mutator/run_as.go | 65 +++++++++++++++++++++++ bundle/config/root.go | 8 +++ bundle/config/target.go | 4 ++ bundle/phases/initialize.go | 1 + bundle/tests/run_as/databricks.yml | 42 +++++++++++++++ bundle/tests/run_as_test.go | 82 ++++++++++++++++++++++++++++++ 6 files changed, 202 insertions(+) create mode 100644 bundle/config/mutator/run_as.go create mode 100644 bundle/tests/run_as/databricks.yml create mode 100644 bundle/tests/run_as_test.go diff --git a/bundle/config/mutator/run_as.go b/bundle/config/mutator/run_as.go new file mode 100644 index 000000000..7d1a49175 --- /dev/null +++ b/bundle/config/mutator/run_as.go @@ -0,0 +1,65 @@ +package mutator + +import ( + "context" + "slices" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type setRunAs struct { +} + +// SetRunAs mutator is used to go over defined resources such as Jobs and DLT Pipelines +// And set correct execution identity ("run_as" for a job or "is_owner" permission for DLT) +// if top-level "run-as" section is defined in the configuration. 
+func SetRunAs() bundle.Mutator { + return &setRunAs{} +} + +func (m *setRunAs) Name() string { + return "SetRunAs" +} + +func (m *setRunAs) Apply(_ context.Context, b *bundle.Bundle) error { + runAs := b.Config.RunAs + if runAs == nil { + return nil + } + + for i := range b.Config.Resources.Jobs { + job := b.Config.Resources.Jobs[i] + if job.RunAs != nil { + continue + } + job.RunAs = &jobs.JobRunAs{ + ServicePrincipalName: runAs.ServicePrincipalName, + UserName: runAs.UserName, + } + } + + me := b.Config.Workspace.CurrentUser.UserName + // If user deploying the bundle and the one defined in run_as are the same + // Do not add IS_OWNER permission. Current user is implied to be an owner in this case. + // Otherwise, it will fail due to this bug https://github.com/databricks/terraform-provider-databricks/issues/2407 + if runAs.UserName == me || runAs.ServicePrincipalName == me { + return nil + } + + for i := range b.Config.Resources.Pipelines { + pipeline := b.Config.Resources.Pipelines[i] + pipeline.Permissions = slices.DeleteFunc(pipeline.Permissions, func(p resources.Permission) bool { + return (runAs.ServicePrincipalName != "" && p.ServicePrincipalName == runAs.ServicePrincipalName) || + (runAs.UserName != "" && p.UserName == runAs.UserName) + }) + pipeline.Permissions = append(pipeline.Permissions, resources.Permission{ + Level: "IS_OWNER", + ServicePrincipalName: runAs.ServicePrincipalName, + UserName: runAs.UserName, + }) + } + + return nil +} diff --git a/bundle/config/root.go b/bundle/config/root.go index e0d20425b..1275dab48 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/ghodss/yaml" "github.com/imdario/mergo" ) @@ -80,6 +81,9 @@ type Root struct { // Sync section specifies options for files synchronization Sync Sync `json:"sync"` + + // RunAs section allows to define an execution 
identity for jobs and pipelines runs + RunAs *jobs.JobRunAs `json:"run_as,omitempty"` } func Load(path string) (*Root, error) { @@ -237,6 +241,10 @@ func (r *Root) MergeTargetOverrides(target *Target) error { } } + if target.RunAs != nil { + r.RunAs = target.RunAs + } + if target.Mode != "" { r.Bundle.Mode = target.Mode } diff --git a/bundle/config/target.go b/bundle/config/target.go index 10775049d..6a45fdb85 100644 --- a/bundle/config/target.go +++ b/bundle/config/target.go @@ -1,5 +1,7 @@ package config +import "github.com/databricks/databricks-sdk-go/service/jobs" + type Mode string // Target defines overrides for a single target. @@ -31,6 +33,8 @@ type Target struct { Variables map[string]string `json:"variables,omitempty"` Git Git `json:"git,omitempty"` + + RunAs *jobs.JobRunAs `json:"run_as,omitempty"` } const ( diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index 219ec26cf..546a8478b 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -16,6 +16,7 @@ func Initialize() bundle.Mutator { "initialize", []bundle.Mutator{ mutator.PopulateCurrentUser(), + mutator.SetRunAs(), mutator.DefineDefaultWorkspaceRoot(), mutator.ExpandWorkspaceRoot(), mutator.DefineDefaultWorkspacePaths(), diff --git a/bundle/tests/run_as/databricks.yml b/bundle/tests/run_as/databricks.yml new file mode 100644 index 000000000..18ea55736 --- /dev/null +++ b/bundle/tests/run_as/databricks.yml @@ -0,0 +1,42 @@ +bundle: + name: "run_as" + +run_as: + service_principal_name: "my_service_principal" + +targets: + development: + mode: development + run_as: + user_name: "my_user_name" + +resources: + pipelines: + nyc_taxi_pipeline: + permissions: + - level: CAN_VIEW + service_principal_name: my_service_principal + - level: CAN_VIEW + user_name: my_user_name + name: "nyc taxi loader" + libraries: + - notebook: + path: ./dlt/nyc_taxi_loader + jobs: + job_one: + name: Job One + tasks: + - task: + notebook_path: "./test.py" + job_two: + name: Job Two + 
tasks: + - task: + notebook_path: "./test.py" + job_three: + name: Job Three + run_as: + service_principal_name: "my_service_principal_for_job" + tasks: + - task: + notebook_path: "./test.py" diff --git a/bundle/tests/run_as_test.go b/bundle/tests/run_as_test.go new file mode 100644 index 000000000..44c068165 --- /dev/null +++ b/bundle/tests/run_as_test.go @@ -0,0 +1,82 @@ +package config_tests + +import ( + "context" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/stretchr/testify/assert" +) + +func TestRunAsDefault(t *testing.T) { + b := load(t, "./run_as") + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + ctx := context.Background() + err := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, err) + + assert.Len(t, b.Config.Resources.Jobs, 3) + jobs := b.Config.Resources.Jobs + + assert.NotNil(t, jobs["job_one"].RunAs) + assert.Equal(t, "my_service_principal", jobs["job_one"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_one"].RunAs.UserName) + + assert.NotNil(t, jobs["job_two"].RunAs) + assert.Equal(t, "my_service_principal", jobs["job_two"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_two"].RunAs.UserName) + + assert.NotNil(t, jobs["job_three"].RunAs) + assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_three"].RunAs.UserName) + + pipelines := b.Config.Resources.Pipelines + assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].UserName, "my_user_name") + + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") + assert.Equal(t, 
pipelines["nyc_taxi_pipeline"].Permissions[1].ServicePrincipalName, "my_service_principal") +} + +func TestRunAsDevelopment(t *testing.T) { + b := loadTarget(t, "./run_as", "development") + b.Config.Workspace.CurrentUser = &config.User{ + User: &iam.User{ + UserName: "jane@doe.com", + }, + } + ctx := context.Background() + err := bundle.Apply(ctx, b, mutator.SetRunAs()) + assert.NoError(t, err) + + assert.Len(t, b.Config.Resources.Jobs, 3) + jobs := b.Config.Resources.Jobs + + assert.NotNil(t, jobs["job_one"].RunAs) + assert.Equal(t, "", jobs["job_one"].RunAs.ServicePrincipalName) + assert.Equal(t, "my_user_name", jobs["job_one"].RunAs.UserName) + + assert.NotNil(t, jobs["job_two"].RunAs) + assert.Equal(t, "", jobs["job_two"].RunAs.ServicePrincipalName) + assert.Equal(t, "my_user_name", jobs["job_two"].RunAs.UserName) + + assert.NotNil(t, jobs["job_three"].RunAs) + assert.Equal(t, "my_service_principal_for_job", jobs["job_three"].RunAs.ServicePrincipalName) + assert.Equal(t, "", jobs["job_three"].RunAs.UserName) + + pipelines := b.Config.Resources.Pipelines + assert.Len(t, pipelines["nyc_taxi_pipeline"].Permissions, 2) + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].Level, "CAN_VIEW") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[0].ServicePrincipalName, "my_service_principal") + + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].Level, "IS_OWNER") + assert.Equal(t, pipelines["nyc_taxi_pipeline"].Permissions[1].UserName, "my_user_name") +} From c5cd20de23fe6ca01d31de0a68bdfcf821804d21 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 24 Aug 2023 13:04:20 +0200 Subject: [PATCH 075/139] Release v0.203.2 (#694) CLI: * Added `databricks account o-auth-enrollment enable` command ([#687](https://github.com/databricks/cli/pull/687)). Bundles: * Do not try auto detect Python package if no Python wheel tasks defined ([#674](https://github.com/databricks/cli/pull/674)). 
* Renamed `environments` to `targets` in bundle configuration ([#670](https://github.com/databricks/cli/pull/670)). * Rename init project-dir flag to output-dir ([#676](https://github.com/databricks/cli/pull/676)). * Added support for sync.include and sync.exclude sections ([#671](https://github.com/databricks/cli/pull/671)). * Add template directory flag for bundle templates ([#675](https://github.com/databricks/cli/pull/675)). * Never ignore root directory when enumerating files in a repository ([#683](https://github.com/databricks/cli/pull/683)). * Improve 'mode' error message ([#681](https://github.com/databricks/cli/pull/681)). * Added run_as section for bundle configuration ([#692](https://github.com/databricks/cli/pull/692)). --- CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b0b6bc0ed..fa0dec134 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Version changelog +## 0.203.2 + +CLI: + * Added `databricks account o-auth-enrollment enable` command ([#687](https://github.com/databricks/cli/pull/687)). + +Bundles: + * Do not try auto detect Python package if no Python wheel tasks defined ([#674](https://github.com/databricks/cli/pull/674)). + * Renamed `environments` to `targets` in bundle configuration ([#670](https://github.com/databricks/cli/pull/670)). + * Rename init project-dir flag to output-dir ([#676](https://github.com/databricks/cli/pull/676)). + * Added support for sync.include and sync.exclude sections ([#671](https://github.com/databricks/cli/pull/671)). + * Add template directory flag for bundle templates ([#675](https://github.com/databricks/cli/pull/675)). + * Never ignore root directory when enumerating files in a repository ([#683](https://github.com/databricks/cli/pull/683)). + * Improve 'mode' error message ([#681](https://github.com/databricks/cli/pull/681)). + * Added run_as section for bundle configuration ([#692](https://github.com/databricks/cli/pull/692)). 
+ ## 0.203.1 CLI: From a5b86093ecc15989bf8473699e94a2518017488a Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Fri, 25 Aug 2023 11:03:42 +0200 Subject: [PATCH 076/139] Add a foundation for built-in templates (#685) ## Changes This pull request extends the templating support in preparation of a new, default template (WIP, https://github.com/databricks/cli/pull/686): * builtin templates that can be initialized using e.g. `databricks bundle init default-python` * builtin templates are embedded into the executable using go's `embed` functionality, making sure they're co-versioned with the CLI * new helpers to get the workspace name, current user name, etc. help craft a complete template * (not enabled yet) when the user types `databricks bundle init` they can interactively select the `default-python` template And makes two tangentially related changes: * IsServicePrincipal now uses the "users" API rather than the "principals" API, since the latter is too slow for our purposes. * mode: prod no longer requires the 'target.prod.git' setting. It's hard to set that from a template. (Pieter is planning an overhaul of warnings support; this would be one of the first warnings we show.) 
The actual `default-python` template is maintained in a separate PR: https://github.com/databricks/cli/pull/686 ## Tests Unit tests, manual testing --- .gitignore | 3 + .vscode/__builtins__.pyi | 3 + .vscode/settings.json | 3 +- bundle/config/mutator/process_target_mode.go | 25 +--- .../mutator/process_target_mode_test.go | 11 -- cmd/bundle/init.go | 25 +++- cmd/root/auth.go | 4 + cmd/root/bundle.go | 4 +- libs/auth/service_principal.go | 16 +++ libs/template/helpers.go | 120 +++++++++++++----- libs/template/helpers_test.go | 68 +++++++++- libs/template/materialize.go | 64 +++++++++- libs/template/renderer.go | 6 +- libs/template/renderer_test.go | 46 +++++-- .../databricks_template_schema.json | 9 ++ .../templates/default-python/defaults.json | 3 + .../template/{{.project_name}}/README.md | 3 + .../workspace-host/template/file.tmpl | 2 + 18 files changed, 326 insertions(+), 89 deletions(-) create mode 100644 .vscode/__builtins__.pyi create mode 100644 libs/auth/service_principal.go create mode 100644 libs/template/templates/default-python/databricks_template_schema.json create mode 100644 libs/template/templates/default-python/defaults.json create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/README.md create mode 100644 libs/template/testdata/workspace-host/template/file.tmpl diff --git a/.gitignore b/.gitignore index 5f00a82b3..edd1409ae 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,6 @@ __pycache__ .terraform.lock.hcl .vscode/launch.json +.vscode/tasks.json + +.databricks diff --git a/.vscode/__builtins__.pyi b/.vscode/__builtins__.pyi new file mode 100644 index 000000000..81f9a49ec --- /dev/null +++ b/.vscode/__builtins__.pyi @@ -0,0 +1,3 @@ +# Typings for Pylance in VS Code +# see https://github.com/microsoft/pyright/blob/main/docs/builtins.md +from databricks.sdk.runtime import * diff --git a/.vscode/settings.json b/.vscode/settings.json index 76be94afa..687e0fc02 100644 --- a/.vscode/settings.json +++ 
b/.vscode/settings.json @@ -7,5 +7,6 @@ "files.insertFinalNewline": true, "files.trimFinalNewlines": true, "python.envFile": "${workspaceFolder}/.databricks/.databricks.env", - "databricks.python.envFile": "${workspaceFolder}/.env" + "databricks.python.envFile": "${workspaceFolder}/.env", + "python.analysis.stubPath": ".vscode" } diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index fca4e4b03..3a00d42fc 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -8,7 +8,8 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" - "github.com/databricks/databricks-sdk-go/service/iam" + "github.com/databricks/cli/libs/auth" + "github.com/databricks/cli/libs/log" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" ) @@ -111,7 +112,7 @@ func findIncorrectPath(b *bundle.Bundle, mode config.Mode) string { func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUsed bool) error { if b.Config.Bundle.Git.Inferred { env := b.Config.Bundle.Target - return fmt.Errorf("target with 'mode: production' must specify an explicit 'targets.%s.git' configuration", env) + log.Warnf(ctx, "target with 'mode: production' should specify an explicit 'targets.%s.git' configuration", env) } r := b.Config.Resources @@ -138,21 +139,6 @@ func validateProductionMode(ctx context.Context, b *bundle.Bundle, isPrincipalUs return nil } -// Determines whether a service principal identity is used to run the CLI. -func isServicePrincipalUsed(ctx context.Context, b *bundle.Bundle) (bool, error) { - ws := b.WorkspaceClient() - - // Check if a principal with the current user's ID exists. - // We need to use the ListAll method since Get is only usable by admins. 
- matches, err := ws.ServicePrincipals.ListAll(ctx, iam.ListServicePrincipalsRequest{ - Filter: "id eq " + b.Config.Workspace.CurrentUser.Id, - }) - if err != nil { - return false, err - } - return len(matches) > 0, nil -} - // Determines whether run_as is explicitly set for all resources. // We do this in a best-effort fashion rather than check the top-level // 'run_as' field because the latter is not required to be set. @@ -174,10 +160,7 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { } return transformDevelopmentMode(b) case config.Production: - isPrincipal, err := isServicePrincipalUsed(ctx, b) - if err != nil { - return err - } + isPrincipal := auth.IsServicePrincipal(ctx, b.WorkspaceClient(), b.Config.Workspace.CurrentUser.Id) return validateProductionMode(ctx, b, isPrincipal) case "": // No action diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 76db64dee..489632e17 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -118,17 +118,6 @@ func TestProcessTargetModeProduction(t *testing.T) { assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } -func TestProcessTargetModeProductionGit(t *testing.T) { - bundle := mockBundle(config.Production) - - // Pretend the user didn't set Git configuration explicitly - bundle.Config.Bundle.Git.Inferred = true - - err := validateProductionMode(context.Background(), bundle, false) - require.ErrorContains(t, err, "git") - bundle.Config.Bundle.Git.Inferred = false -} - func TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { bundle := mockBundle(config.Production) diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index 9ba7e190e..2127a7bc4 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -1,10 +1,12 @@ package bundle import ( + "errors" "os" "path/filepath" "strings" + 
"github.com/databricks/cli/cmd/root" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" @@ -36,9 +38,9 @@ func repoName(url string) string { func newInitCommand() *cobra.Command { cmd := &cobra.Command{ - Use: "init TEMPLATE_PATH", + Use: "init [TEMPLATE_PATH]", Short: "Initialize Template", - Args: cobra.ExactArgs(1), + Args: cobra.MaximumNArgs(1), } var configFile string @@ -48,9 +50,26 @@ func newInitCommand() *cobra.Command { cmd.Flags().StringVar(&templateDir, "template-dir", "", "Directory within repository that holds the template specification.") cmd.Flags().StringVar(&outputDir, "output-dir", "", "Directory to write the initialized template to.") + cmd.PreRunE = root.MustWorkspaceClient cmd.RunE = func(cmd *cobra.Command, args []string) error { - templatePath := args[0] ctx := cmd.Context() + var templatePath string + if len(args) > 0 { + templatePath = args[0] + } else { + return errors.New("please specify a template") + + /* TODO: propose to use default-python (once template is ready) + var err error + if !cmdio.IsOutTTY(ctx) || !cmdio.IsInTTY(ctx) { + return errors.New("please specify a template") + } + templatePath, err = cmdio.Ask(ctx, "Template to use", "default-python") + if err != nil { + return err + } + */ + } if !isRepoUrl(templatePath) { // skip downloading the repo because input arg is not a URL. 
We assume diff --git a/cmd/root/auth.go b/cmd/root/auth.go index 2f32d260e..e56074ef4 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -113,6 +113,10 @@ TRY_AUTH: // or try picking a config profile dynamically return nil } +func SetWorkspaceClient(ctx context.Context, w *databricks.WorkspaceClient) context.Context { + return context.WithValue(ctx, &workspaceClient, w) +} + func transformLoadError(path string, err error) error { if os.IsNotExist(err) { return fmt.Errorf("no configuration file found at %s; please create one first", path) diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index e1c123365..ba7a5dfd8 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -43,7 +43,7 @@ func getTarget(cmd *cobra.Command) (value string) { return target } -func getProfile(cmd *cobra.Command) (value string) { +func GetProfile(cmd *cobra.Command) (value string) { // The command line flag takes precedence. flag := cmd.Flag("profile") if flag != nil { @@ -70,7 +70,7 @@ func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context return nil, nil } - profile := getProfile(cmd) + profile := GetProfile(cmd) if profile != "" { b.Config.Workspace.Profile = profile } diff --git a/libs/auth/service_principal.go b/libs/auth/service_principal.go new file mode 100644 index 000000000..58fcc6a7a --- /dev/null +++ b/libs/auth/service_principal.go @@ -0,0 +1,16 @@ +package auth + +import ( + "context" + + "github.com/databricks/databricks-sdk-go" +) + +// Determines whether a given user id is a service principal. +// This function uses a heuristic: if no user exists with this id, we assume +// it's a service principal. Unfortunately, the standard service principal API is too +// slow for our purposes. 
+func IsServicePrincipal(ctx context.Context, ws *databricks.WorkspaceClient, userId string) bool { + _, err := ws.Users.GetById(ctx, userId) + return err != nil +} diff --git a/libs/template/helpers.go b/libs/template/helpers.go index ac8466586..b8f2fe456 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -1,10 +1,16 @@ package template import ( + "context" + "errors" "fmt" "net/url" "regexp" "text/template" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/auth" + "github.com/databricks/databricks-sdk-go/service/iam" ) type ErrFail struct { @@ -20,35 +26,87 @@ type pair struct { v any } -var helperFuncs = template.FuncMap{ - "fail": func(format string, args ...any) (any, error) { - return nil, ErrFail{fmt.Sprintf(format, args...)} - }, - // Alias for https://pkg.go.dev/net/url#Parse. Allows usage of all methods of url.URL - "url": func(rawUrl string) (*url.URL, error) { - return url.Parse(rawUrl) - }, - // Alias for https://pkg.go.dev/regexp#Compile. Allows usage of all methods of regexp.Regexp - "regexp": func(expr string) (*regexp.Regexp, error) { - return regexp.Compile(expr) - }, - // A key value pair. This is used with the map function to generate maps - // to use inside a template - "pair": func(k string, v any) pair { - return pair{k, v} - }, - // map converts a list of pairs to a map object. This is useful to pass multiple - // objects to templates defined in the library directory. Go text template - // syntax for invoking a template only allows specifying a single argument, - // this function can be used to workaround that limitation. 
- // - // For example: {{template "my_template" (map (pair "foo" $arg1) (pair "bar" $arg2))}} - // $arg1 and $arg2 can be referred from inside "my_template" as ".foo" and ".bar" - "map": func(pairs ...pair) map[string]any { - result := make(map[string]any, 0) - for _, p := range pairs { - result[p.k] = p.v - } - return result - }, +func loadHelpers(ctx context.Context) template.FuncMap { + var user *iam.User + var is_service_principal *bool + w := root.WorkspaceClient(ctx) + return template.FuncMap{ + "fail": func(format string, args ...any) (any, error) { + return nil, ErrFail{fmt.Sprintf(format, args...)} + }, + // Alias for https://pkg.go.dev/net/url#Parse. Allows usage of all methods of url.URL + "url": func(rawUrl string) (*url.URL, error) { + return url.Parse(rawUrl) + }, + // Alias for https://pkg.go.dev/regexp#Compile. Allows usage of all methods of regexp.Regexp + "regexp": func(expr string) (*regexp.Regexp, error) { + return regexp.Compile(expr) + }, + // A key value pair. This is used with the map function to generate maps + // to use inside a template + "pair": func(k string, v any) pair { + return pair{k, v} + }, + // map converts a list of pairs to a map object. This is useful to pass multiple + // objects to templates defined in the library directory. Go text template + // syntax for invoking a template only allows specifying a single argument, + // this function can be used to workaround that limitation. 
+ // + // For example: {{template "my_template" (map (pair "foo" $arg1) (pair "bar" $arg2))}} + // $arg1 and $arg2 can be referred from inside "my_template" as ".foo" and ".bar" + "map": func(pairs ...pair) map[string]any { + result := make(map[string]any, 0) + for _, p := range pairs { + result[p.k] = p.v + } + return result + }, + // Get smallest node type (follows Terraform's GetSmallestNodeType) + "smallest_node_type": func() (string, error) { + if w.Config.Host == "" { + return "", errors.New("cannot determine target workspace, please first setup a configuration profile using 'databricks auth login'") + } + if w.Config.IsAzure() { + return "Standard_D3_v2", nil + } else if w.Config.IsGcp() { + return "n1-standard-4", nil + } + return "i3.xlarge", nil + }, + "workspace_host": func() (string, error) { + if w.Config.Host == "" { + return "", errors.New("cannot determine target workspace, please first setup a configuration profile using 'databricks auth login'") + } + return w.Config.Host, nil + }, + "user_name": func() (string, error) { + if user == nil { + var err error + user, err = w.CurrentUser.Me(ctx) + if err != nil { + return "", err + } + } + result := user.UserName + if result == "" { + result = user.Id + } + return result, nil + }, + "is_service_principal": func() (bool, error) { + if is_service_principal != nil { + return *is_service_principal, nil + } + if user == nil { + var err error + user, err = w.CurrentUser.Me(ctx) + if err != nil { + return false, err + } + } + result := auth.IsServicePrincipal(ctx, w, user.Id) + is_service_principal = &result + return result, nil + }, + } } diff --git a/libs/template/helpers_test.go b/libs/template/helpers_test.go index 023eed297..d495ae895 100644 --- a/libs/template/helpers_test.go +++ b/libs/template/helpers_test.go @@ -2,9 +2,15 @@ package template import ( "context" + "os" "strings" "testing" + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + 
"github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go" + workspaceConfig "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -13,7 +19,9 @@ func TestTemplatePrintStringWithoutProcessing(t *testing.T) { ctx := context.Background() tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/print-without-processing/template", "./testdata/print-without-processing/library", tmpDir) + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/print-without-processing/template", "./testdata/print-without-processing/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -28,7 +36,9 @@ func TestTemplateRegexpCompileFunction(t *testing.T) { ctx := context.Background() tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/regexp-compile/template", "./testdata/regexp-compile/library", tmpDir) + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/regexp-compile/template", "./testdata/regexp-compile/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -44,7 +54,9 @@ func TestTemplateUrlFunction(t *testing.T) { ctx := context.Background() tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/urlparse-function/template", "./testdata/urlparse-function/library", tmpDir) + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/urlparse-function/template", "./testdata/urlparse-function/library", tmpDir) require.NoError(t, err) @@ -59,7 +71,9 @@ func TestTemplateMapPairFunction(t *testing.T) { ctx := context.Background() tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/map-pair/template", "./testdata/map-pair/library", tmpDir) + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := 
newRenderer(ctx, nil, helpers, "./testdata/map-pair/template", "./testdata/map-pair/library", tmpDir) require.NoError(t, err) @@ -69,3 +83,49 @@ func TestTemplateMapPairFunction(t *testing.T) { assert.Len(t, r.files, 1) assert.Equal(t, "false 123 hello 12.3", string(r.files[0].(*inMemoryFile).content)) } + +func TestWorkspaceHost(t *testing.T) { + ctx := context.Background() + tmpDir := t.TempDir() + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{ + Host: "https://myhost.com", + }, + } + ctx = root.SetWorkspaceClient(ctx, w) + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/workspace-host/template", "./testdata/map-pair/library", tmpDir) + + require.NoError(t, err) + + err = r.walk() + assert.NoError(t, err) + + assert.Len(t, r.files, 1) + assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "https://myhost.com") + assert.Contains(t, string(r.files[0].(*inMemoryFile).content), "i3.xlarge") + +} + +func TestWorkspaceHostNotConfigured(t *testing.T) { + ctx := context.Background() + cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "template") + ctx = cmdio.InContext(ctx, cmd) + tmpDir := t.TempDir() + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{}, + } + ctx = root.SetWorkspaceClient(ctx, w) + + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/workspace-host/template", "./testdata/map-pair/library", tmpDir) + + assert.NoError(t, err) + + err = r.walk() + require.ErrorContains(t, err, "cannot determine target workspace") + +} diff --git a/libs/template/materialize.go b/libs/template/materialize.go index 426646c33..5422160df 100644 --- a/libs/template/materialize.go +++ b/libs/template/materialize.go @@ -2,6 +2,10 @@ package template import ( "context" + "embed" + "io/fs" + "os" + "path" "path/filepath" ) @@ -9,6 +13,9 @@ const libraryDirName = "library" const templateDirName = "template" const schemaFileName = 
"databricks_template_schema.json" +//go:embed all:templates +var builtinTemplates embed.FS + // This function materializes the input templates as a project, using user defined // configurations. // Parameters: @@ -18,9 +25,21 @@ const schemaFileName = "databricks_template_schema.json" // templateRoot: root of the template definition // outputDir: root of directory where to initialize the template func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir string) error { + // Use a temporary directory in case any builtin templates like default-python are used + tempDir, err := os.MkdirTemp("", "templates") + defer os.RemoveAll(tempDir) + if err != nil { + return err + } + templateRoot, err = prepareBuiltinTemplates(templateRoot, tempDir) + if err != nil { + return err + } + templatePath := filepath.Join(templateRoot, templateDirName) libraryPath := filepath.Join(templateRoot, libraryDirName) schemaPath := filepath.Join(templateRoot, schemaFileName) + helpers := loadHelpers(ctx) config, err := newConfig(ctx, schemaPath) if err != nil { @@ -48,7 +67,7 @@ func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir st } // Walk and render the template, since input configuration is complete - r, err := newRenderer(ctx, config.values, templatePath, libraryPath, outputDir) + r, err := newRenderer(ctx, config.values, helpers, templatePath, libraryPath, outputDir) if err != nil { return err } @@ -56,5 +75,46 @@ func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir st if err != nil { return err } - return r.persistToDisk() + + err = r.persistToDisk() + if err != nil { + return err + } + println("✨ Successfully initialized template") + return nil +} + +// If the given templateRoot matches +func prepareBuiltinTemplates(templateRoot string, tempDir string) (string, error) { + _, err := fs.Stat(builtinTemplates, path.Join("templates", templateRoot)) + if err != nil { + // The given path doesn't appear to be using out 
built-in templates + return templateRoot, nil + } + + // We have a built-in template with the same name as templateRoot! + // Now we need to make a fully copy of the builtin templates to a real file system + // since template.Parse() doesn't support embed.FS. + err = fs.WalkDir(builtinTemplates, "templates", func(path string, entry fs.DirEntry, err error) error { + if err != nil { + return err + } + + targetPath := filepath.Join(tempDir, path) + if entry.IsDir() { + return os.Mkdir(targetPath, 0755) + } else { + content, err := fs.ReadFile(builtinTemplates, path) + if err != nil { + return err + } + return os.WriteFile(targetPath, content, 0644) + } + }) + + if err != nil { + return "", err + } + + return filepath.Join(tempDir, "templates", templateRoot), nil } diff --git a/libs/template/renderer.go b/libs/template/renderer.go index 9be1b58ec..f4bd99d2c 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -57,9 +57,9 @@ type renderer struct { instanceRoot string } -func newRenderer(ctx context.Context, config map[string]any, templateRoot, libraryRoot, instanceRoot string) (*renderer, error) { +func newRenderer(ctx context.Context, config map[string]any, helpers template.FuncMap, templateRoot, libraryRoot, instanceRoot string) (*renderer, error) { // Initialize new template, with helper functions loaded - tmpl := template.New("").Funcs(helperFuncs) + tmpl := template.New("").Funcs(helpers) // Load user defined associated templates from the library root libraryGlob := filepath.Join(libraryRoot, "*") @@ -104,7 +104,7 @@ func (r *renderer) executeTemplate(templateDefinition string) (string, error) { // Parse the template text tmpl, err = tmpl.Parse(templateDefinition) if err != nil { - return "", err + return "", fmt.Errorf("error in %s: %w", templateDefinition, err) } // Execute template and get result diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index f3f7f2345..a2e5675e8 100644 --- a/libs/template/renderer_test.go 
+++ b/libs/template/renderer_test.go @@ -12,6 +12,7 @@ import ( "testing" "text/template" + "github.com/databricks/cli/cmd/root" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -31,7 +32,10 @@ func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) { func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) { tmpDir := t.TempDir() - r, err := newRenderer(context.Background(), nil, "./testdata/email/template", "./testdata/email/library", tmpDir) + ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/email/template", "./testdata/email/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -202,9 +206,11 @@ func TestRendererPersistToDisk(t *testing.T) { func TestRendererWalk(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/walk/template", "./testdata/walk/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/walk/template", "./testdata/walk/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -241,9 +247,11 @@ func TestRendererWalk(t *testing.T) { func TestRendererFailFunction(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/fail/template", "./testdata/fail/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/fail/template", "./testdata/fail/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -252,9 +260,11 @@ func TestRendererFailFunction(t *testing.T) { func TestRendererSkipsDirsEagerly(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/skip-dir-eagerly/template", 
"./testdata/skip-dir-eagerly/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-dir-eagerly/template", "./testdata/skip-dir-eagerly/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -267,9 +277,11 @@ func TestRendererSkipsDirsEagerly(t *testing.T) { func TestRendererSkipAllFilesInCurrentDirectory(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/skip-all-files-in-cwd/template", "./testdata/skip-all-files-in-cwd/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-all-files-in-cwd/template", "./testdata/skip-all-files-in-cwd/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -288,9 +300,11 @@ func TestRendererSkipAllFilesInCurrentDirectory(t *testing.T) { func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/skip-is-relative/template", "./testdata/skip-is-relative/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip-is-relative/template", "./testdata/skip-is-relative/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -304,9 +318,11 @@ func TestRendererSkipPatternsAreRelativeToFileDirectory(t *testing.T) { func TestRendererSkip(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/skip/template", "./testdata/skip/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/skip/template", "./testdata/skip/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -335,8 +351,10 @@ func TestRendererReadsPermissionsBits(t *testing.T) { } tmpDir := t.TempDir() ctx := 
context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) - r, err := newRenderer(ctx, nil, "./testdata/executable-bit-read/template", "./testdata/executable-bit-read/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/executable-bit-read/template", "./testdata/executable-bit-read/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -422,9 +440,11 @@ func TestRendererNoErrorOnConflictingFileIfSkipped(t *testing.T) { func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/copy-file-walk/template", "./testdata/copy-file-walk/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, "./testdata/copy-file-walk/template", "./testdata/copy-file-walk/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -437,12 +457,14 @@ func TestRendererNonTemplatesAreCreatedAsCopyFiles(t *testing.T) { func TestRendererFileTreeRendering(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() + helpers := loadHelpers(ctx) r, err := newRenderer(ctx, map[string]any{ "dir_name": "my_directory", "file_name": "my_file", - }, "./testdata/file-tree-rendering/template", "./testdata/file-tree-rendering/library", tmpDir) + }, helpers, "./testdata/file-tree-rendering/template", "./testdata/file-tree-rendering/library", tmpDir) require.NoError(t, err) err = r.walk() @@ -462,9 +484,11 @@ func TestRendererFileTreeRendering(t *testing.T) { func TestRendererSubTemplateInPath(t *testing.T) { ctx := context.Background() + ctx = root.SetWorkspaceClient(ctx, nil) tmpDir := t.TempDir() - r, err := newRenderer(ctx, nil, "./testdata/template-in-path/template", "./testdata/template-in-path/library", tmpDir) + helpers := loadHelpers(ctx) + r, err := newRenderer(ctx, nil, helpers, 
"./testdata/template-in-path/template", "./testdata/template-in-path/library", tmpDir) require.NoError(t, err) err = r.walk() diff --git a/libs/template/templates/default-python/databricks_template_schema.json b/libs/template/templates/default-python/databricks_template_schema.json new file mode 100644 index 000000000..b680c5fbb --- /dev/null +++ b/libs/template/templates/default-python/databricks_template_schema.json @@ -0,0 +1,9 @@ +{ + "properties": { + "project_name": { + "type": "string", + "default": "my_project", + "description": "Name of the directory" + } + } +} diff --git a/libs/template/templates/default-python/defaults.json b/libs/template/templates/default-python/defaults.json new file mode 100644 index 000000000..99ecd36d2 --- /dev/null +++ b/libs/template/templates/default-python/defaults.json @@ -0,0 +1,3 @@ +{ + "project_name": "my_project" +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md b/libs/template/templates/default-python/template/{{.project_name}}/README.md new file mode 100644 index 000000000..3187b9ed0 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/README.md @@ -0,0 +1,3 @@ +# {{.project_name}} + +The '{{.project_name}}' bundle was generated using the default-python template. diff --git a/libs/template/testdata/workspace-host/template/file.tmpl b/libs/template/testdata/workspace-host/template/file.tmpl new file mode 100644 index 000000000..2098e41b4 --- /dev/null +++ b/libs/template/testdata/workspace-host/template/file.tmpl @@ -0,0 +1,2 @@ +{{workspace_host}} +{{smallest_node_type}} From 861f33d37696413f07fea6c6bb72fa1f8486fda6 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Mon, 28 Aug 2023 00:51:35 -0700 Subject: [PATCH 077/139] Support cluster overrides with cluster_key and compute_key (#696) ## Changes Support `databricks bundle deploy --compute-id my_all_purpose_cluster` in two missing cases. 
--- bundle/config/mutator/override_compute.go | 6 +-- .../config/mutator/override_compute_test.go | 37 +++++++++++++++++++ 2 files changed, 40 insertions(+), 3 deletions(-) diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index 124392491..ee2e2a825 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -23,10 +23,10 @@ func (m *overrideCompute) Name() string { func overrideJobCompute(j *resources.Job, compute string) { for i := range j.Tasks { task := &j.Tasks[i] - if task.NewCluster != nil { + if task.NewCluster != nil || task.ExistingClusterId != "" || task.ComputeKey != "" || task.JobClusterKey != "" { task.NewCluster = nil - task.ExistingClusterId = compute - } else if task.ExistingClusterId != "" { + task.JobClusterKey = "" + task.ComputeKey = "" task.ExistingClusterId = compute } } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index 9eb99edb9..f04c91c46 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -34,6 +34,12 @@ func TestOverrideDevelopment(t *testing.T) { { ExistingClusterId: "cluster2", }, + { + ComputeKey: "compute_key", + }, + { + JobClusterKey: "cluster_key", + }, }, }}, }, @@ -47,6 +53,12 @@ func TestOverrideDevelopment(t *testing.T) { assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) + assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[2].ExistingClusterId) + assert.Equal(t, "newClusterID", bundle.Config.Resources.Jobs["job1"].Tasks[3].ExistingClusterId) + + assert.Nil(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].NewCluster) + assert.Empty(t, 
bundle.Config.Resources.Jobs["job1"].Tasks[2].ComputeKey) + assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[3].JobClusterKey) } func TestOverrideDevelopmentEnv(t *testing.T) { @@ -77,6 +89,31 @@ func TestOverrideDevelopmentEnv(t *testing.T) { assert.Equal(t, "cluster2", bundle.Config.Resources.Jobs["job1"].Tasks[1].ExistingClusterId) } +func TestOverridePipelineTask(t *testing.T) { + os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + bundle := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": {JobSettings: &jobs.JobSettings{ + Name: "job1", + Tasks: []jobs.Task{ + { + PipelineTask: &jobs.PipelineTask{}, + }, + }, + }}, + }, + }, + }, + } + + m := mutator.OverrideCompute() + err := m.Apply(context.Background(), bundle) + require.NoError(t, err) + assert.Empty(t, bundle.Config.Resources.Jobs["job1"].Tasks[0].ExistingClusterId) +} + func TestOverrideProduction(t *testing.T) { bundle := &bundle.Bundle{ Config: config.Root{ From 5f6289e3a71928f8d4f50908db8e88e485b673be Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 28 Aug 2023 18:29:04 +0200 Subject: [PATCH 078/139] Allow referencing local Python wheels without artifacts section defined (#703) ## Changes Now if the user reference local Python wheel files and do not specify "artifacts" section, this file will be automatically uploaded by CLI. Fixes #693 ## Tests Added unit tests Ran bundle deploy for this configuration ``` resources: jobs: some_other_job: name: "[${bundle.environment}] My Wheel Job" tasks: - task_key: TestTask existing_cluster_id: ${var.job_existing_cluster} python_wheel_task: package_name: "my_test_code" entry_point: "run" libraries: - whl: ./dist/*.whl ``` Result ``` andrew.nester@HFW9Y94129 wheel % databricks bundle deploy artifacts.whl.AutoDetect: Detecting Python wheel project... 
artifacts.whl.AutoDetect: No Python wheel project found at bundle root folder Starting upload of bundle files Uploaded bundle files at /Users/andrew.nester@databricks.com/.bundle/wheel-task/default/files! artifacts.Upload(my_test_code-0.0.1-py3-none-any.whl): Uploading... artifacts.Upload(my_test_code-0.0.1-py3-none-any.whl): Upload succeeded ``` --- bundle/artifacts/autodetect.go | 1 + bundle/artifacts/infer.go | 6 +- bundle/artifacts/whl/from_libraries.go | 56 ++++++++++++++++++ .../.gitignore | 3 + .../bundle.yml | 22 +++++++ .../my_test_code-0.0.1-py3-none-any.whl | Bin 0 -> 1909 bytes bundle/tests/bundle/wheel_test.go | 26 ++++++++ 7 files changed, 113 insertions(+), 1 deletion(-) create mode 100644 bundle/artifacts/whl/from_libraries.go create mode 100644 bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore create mode 100644 bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml create mode 100644 bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl diff --git a/bundle/artifacts/autodetect.go b/bundle/artifacts/autodetect.go index fa8126f97..6e80ef0b6 100644 --- a/bundle/artifacts/autodetect.go +++ b/bundle/artifacts/autodetect.go @@ -28,5 +28,6 @@ func (m *autodetect) Apply(ctx context.Context, b *bundle.Bundle) error { return bundle.Apply(ctx, b, bundle.Seq( whl.DetectPackage(), + whl.DefineArtifactsFromLibraries(), )) } diff --git a/bundle/artifacts/infer.go b/bundle/artifacts/infer.go index 233fbda86..ade5def51 100644 --- a/bundle/artifacts/infer.go +++ b/bundle/artifacts/infer.go @@ -47,7 +47,11 @@ func (m *infer) Apply(ctx context.Context, b *bundle.Bundle) error { return fmt.Errorf("artifact doesn't exist: %s", m.name) } - if artifact.BuildCommand != "" { + // only try to infer command if it's not already defined + // and there is no explicitly files defined which means + // that the package is built outside of bundle cycles + // manually by customer + if artifact.BuildCommand != "" || 
len(artifact.Files) > 0 { return nil } diff --git a/bundle/artifacts/whl/from_libraries.go b/bundle/artifacts/whl/from_libraries.go new file mode 100644 index 000000000..855e5b943 --- /dev/null +++ b/bundle/artifacts/whl/from_libraries.go @@ -0,0 +1,56 @@ +package whl + +import ( + "context" + "path/filepath" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/libs/log" +) + +type fromLibraries struct{} + +func DefineArtifactsFromLibraries() bundle.Mutator { + return &fromLibraries{} +} + +func (m *fromLibraries) Name() string { + return "artifacts.whl.DefineArtifactsFromLibraries" +} + +func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) error { + if len(b.Config.Artifacts) != 0 { + log.Debugf(ctx, "Skipping defining artifacts from libraries because artifacts section is explicitly defined") + return nil + } + + tasks := libraries.FindAllWheelTasks(b) + for _, task := range tasks { + for _, lib := range task.Libraries { + matches, err := filepath.Glob(filepath.Join(b.Config.Path, lib.Whl)) + // File referenced from libraries section does not exists, skipping + if err != nil { + continue + } + + for _, match := range matches { + name := filepath.Base(match) + if b.Config.Artifacts == nil { + b.Config.Artifacts = make(map[string]*config.Artifact) + } + + log.Debugf(ctx, "Adding an artifact block for %s", match) + b.Config.Artifacts[name] = &config.Artifact{ + Files: []config.ArtifactFile{ + {Source: match}, + }, + Type: config.ArtifactPythonWheel, + } + } + } + } + + return nil +} diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore new file mode 100644 index 000000000..f03e23bc2 --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/.gitignore @@ -0,0 +1,3 @@ +build/ +*.egg-info +.databricks diff --git 
a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml new file mode 100644 index 000000000..1bac4ebad --- /dev/null +++ b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/bundle.yml @@ -0,0 +1,22 @@ +bundle: + name: python-wheel-local + +resources: + jobs: + test_job: + name: "[${bundle.environment}] My Wheel Job" + tasks: + - task_key: TestTask + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./package/*.whl + - task_key: TestTask2 + existing_cluster_id: "0717-aaaaa-bbbbbb" + python_wheel_task: + package_name: "my_test_code" + entry_point: "run" + libraries: + - whl: ./non-existing/*.whl diff --git a/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl b/bundle/tests/bundle/python_wheel_no_artifact_no_setup/package/my_test_code-0.0.1-py3-none-any.whl new file mode 100644 index 0000000000000000000000000000000000000000..14702281d87f6180c03799b19ec1ab6a10f95509 GIT binary patch literal 1909 zcmWIWW@Zs#U|`^2*jp?fe!}|sR6QWi0Ei`kIJYvsB(=CCJ~=-nRX;vHGcU6wK3=b& zvb$g3TUYDcne*NK;Tu8&j4l{oFb;Xt)$gyPd-8;S6UQ!scPxs+J%e^0VsBe z?D2WA7-;q<>~`cPX6AwI2<_X=cSu2`t$f}ie!eZ;{1=vN?&DMITQaeELIL+u(>;5) z$X+r2xBYYBxtn(dG^WH%T*S*7xY*=nSa&BQzw;A`e9N@juAK?%jFWvPw$0CZ@vCv+ zUC#34^olIq*6pQyn)$`I!%h`$?Gjn`C#t5}?EPHZ3!U1Q;tPwvd=RbT^uPH3*Yt>e zM<1Tg)_I<`wpMwQR8ZT=$x|fUIW*1fG>RX#;XTV4(hyusmi%+F9@wUD;JW4nu!?^$dy zymf_ll8nmFA_hkt!=|h^YuC#bJFA^;`|kbeX>&dRJ69 z$LIY8Z@pKnw6_ymom8!NL@#1rQkVUv_Vm}n6HeJ~oXB#=tVhD5W4+QnRF8HC7_Eu{ zdJu$_2zfNz!`0P?`@Dzm*^9i8G_yI#;EM5sBLB1Ax?Vab^*051c%9VI>(pmo8F<0q zg0b0k;|ou?PMz1k_*Lf`uZFJI*^@q-f;2R>e4lc8`fLxF(do6c`hOW@qmTJp+EAUA$0x@kV?}enEUrYFTOy$imY| zRy;-bO4j6ipW_%A7#x@w7?cTlCCJs;Kggx^!s((W1BTXzwewju6GS3P6a`)N*1GB} zo6R$|BhW*GGw|1!x5sAg%8Y;T`}~}=1Cuy9zwc4$mk4%KJyvC%@hBo<^2^E7%>*5v 
zoVe}L_+0g#4p+3F{uw5}TWgyqthzT(JRh{-dqHsOvazjxvS@7*ijT2DGQG1vS2$uXOsx>kEV}tyyM^QLgJ<_EQLtFYDR9(i4JT8T(!p zvsMQSZum7N^~wA&w+~NeY*}qm{HI*u>qBp=>s#9M1H2iTM3`}xmcTdygC&h13ON-Z zm!0UQpqGabGZ+|_G^Svh0xunrO~F?9AWX3ZCLv_E;4F&JjYQAp2qV>iDH5-d;7pHh z9(vY9n0JVfdFUAx-Dvb2h%nlWnSfJK@*}#r=vfeE?iR*gLgqp;WPmp-8%Ps75E=u0 Jsly850RXT?oKXM( literal 0 HcmV?d00001 diff --git a/bundle/tests/bundle/wheel_test.go b/bundle/tests/bundle/wheel_test.go index ee7457735..f7f0e75e5 100644 --- a/bundle/tests/bundle/wheel_test.go +++ b/bundle/tests/bundle/wheel_test.go @@ -60,3 +60,29 @@ func TestBundlePythonWheelWithDBFSLib(t *testing.T) { err = match.Apply(ctx, b) require.NoError(t, err) } + +func TestBundlePythonWheelBuildNoBuildJustUpload(t *testing.T) { + ctx := context.Background() + b, err := bundle.Load(ctx, "./python_wheel_no_artifact_no_setup") + require.NoError(t, err) + + m := phases.Build() + err = m.Apply(ctx, b) + require.NoError(t, err) + + match := libraries.MatchWithArtifacts() + err = match.Apply(ctx, b) + require.ErrorContains(t, err, "./non-existing/*.whl") + + require.NotZero(t, len(b.Config.Artifacts)) + + artifact := b.Config.Artifacts["my_test_code-0.0.1-py3-none-any.whl"] + require.NotNil(t, artifact) + require.Empty(t, artifact.BuildCommand) + require.Contains(t, artifact.Files[0].Source, filepath.Join( + b.Config.Path, + "package", + "my_test_code-0.0.1-py3-none-any.whl", + )) + require.True(t, artifact.Files[0].NeedsUpload()) +} From 5477afe4f43f27e4880bde87d2fe066e1362f7dd Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 28 Aug 2023 19:05:55 +0200 Subject: [PATCH 079/139] Fixed --environment flag (#705) ## Changes Fixed --environment flag Fixes https://github.com/databricks/setup-cli/issues/35 ## Tests Added regression test --- cmd/root/bundle.go | 2 +- cmd/root/bundle_test.go | 18 ++++++++++++++++-- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index ba7a5dfd8..fe97fbf22 100644 --- a/cmd/root/bundle.go 
+++ b/cmd/root/bundle.go @@ -26,7 +26,7 @@ func getTarget(cmd *cobra.Command) (value string) { oldFlag := cmd.Flag("environment") if oldFlag != nil { - value = flag.Value.String() + value = oldFlag.Value.String() if value != "" { return } diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 8aff9018f..09b33d589 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -138,7 +138,7 @@ func TestTargetFlagFull(t *testing.T) { err := cmd.ExecuteContext(ctx) assert.NoError(t, err) - assert.Equal(t, cmd.Flag("target").Value.String(), "development") + assert.Equal(t, getTarget(cmd), "development") } func TestTargetFlagShort(t *testing.T) { @@ -150,5 +150,19 @@ func TestTargetFlagShort(t *testing.T) { err := cmd.ExecuteContext(ctx) assert.NoError(t, err) - assert.Equal(t, cmd.Flag("target").Value.String(), "production") + assert.Equal(t, getTarget(cmd), "production") +} + +// TODO: remove when environment flag is fully deprecated +func TestTargetEnvironmentFlag(t *testing.T) { + cmd := emptyCommand(t) + initTargetFlag(cmd) + initEnvironmentFlag(cmd) + cmd.SetArgs([]string{"version", "--environment", "development"}) + + ctx := context.Background() + err := cmd.ExecuteContext(ctx) + assert.NoError(t, err) + + assert.Equal(t, getTarget(cmd), "development") } From 842cd8b7aea55352aed3a60103e352d5332e905b Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 29 Aug 2023 10:26:09 +0200 Subject: [PATCH 080/139] Correctly identify local paths in libraries section (#702) ## Changes Fixes #699 ## Tests Added unit test --- bundle/libraries/libraries.go | 36 +++++++++++++++++++++++++----- bundle/libraries/libraries_test.go | 30 +++++++++++++++++++++++++ 2 files changed, 60 insertions(+), 6 deletions(-) create mode 100644 bundle/libraries/libraries_test.go diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index 29848236c..d26768f95 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -3,6 +3,7 @@ 
package libraries import ( "context" "fmt" + "net/url" "path/filepath" "strings" @@ -92,13 +93,13 @@ func findArtifactsAndMarkForUpload(ctx context.Context, lib *compute.Library, b } if len(matches) == 0 && isLocalLibrary(lib) { - return fmt.Errorf("no library found for %s", libPath(lib)) + return fmt.Errorf("file %s is referenced in libraries section but doesn't exist on the local file system", libPath(lib)) } for _, match := range matches { af, err := findArtifactFileByLocalPath(match, b) if err != nil { - cmdio.LogString(ctx, fmt.Sprintf("%s. Skipping %s. In order to use the library upload it manually", err.Error(), match)) + cmdio.LogString(ctx, fmt.Sprintf("%s. Skipping uploading. In order to use the define 'artifacts' section", err.Error())) } else { af.Libraries = append(af.Libraries, lib) } @@ -116,7 +117,7 @@ func findArtifactFileByLocalPath(path string, b *bundle.Bundle) (*config.Artifac } } - return nil, fmt.Errorf("artifact file is not found for path %s", path) + return nil, fmt.Errorf("artifact section is not defined for file at %s", path) } func libPath(library *compute.Library) string { @@ -139,11 +140,34 @@ func isLocalLibrary(library *compute.Library) bool { return false } - return !isDbfsPath(path) && !isWorkspacePath(path) + if isExplicitFileScheme(path) { + return true + } + + if isRemoteStorageScheme(path) { + return false + } + + return !isWorkspacePath(path) } -func isDbfsPath(path string) bool { - return strings.HasPrefix(path, "dbfs:/") +func isExplicitFileScheme(path string) bool { + return strings.HasPrefix(path, "file://") +} + +func isRemoteStorageScheme(path string) bool { + url, err := url.Parse(path) + if err != nil { + return false + } + + if url.Scheme == "" { + return false + } + + // If the path starts with scheme:// format, it's a correct remote storage scheme + return strings.HasPrefix(path, url.Scheme+"://") + } func isWorkspacePath(path string) bool { diff --git a/bundle/libraries/libraries_test.go 
b/bundle/libraries/libraries_test.go new file mode 100644 index 000000000..050efe749 --- /dev/null +++ b/bundle/libraries/libraries_test.go @@ -0,0 +1,30 @@ +package libraries + +import ( + "fmt" + "testing" + + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/stretchr/testify/require" +) + +var testCases map[string]bool = map[string]bool{ + "./some/local/path": true, + "/some/full/path": true, + "/Workspace/path/to/package": false, + "/Users/path/to/package": false, + "file://path/to/package": true, + "C:\\path\\to\\package": true, + "dbfs://path/to/package": false, + "s3://path/to/package": false, + "abfss://path/to/package": false, +} + +func TestIsLocalLbrary(t *testing.T) { + for p, result := range testCases { + lib := compute.Library{ + Whl: p, + } + require.Equal(t, result, isLocalLibrary(&lib), fmt.Sprintf("isLocalLibrary must return %t for path %s ", result, p)) + } +} From 3f2cf3c6b73de97df75e11e37bf1f759c6ab8006 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Tue, 29 Aug 2023 10:26:26 +0200 Subject: [PATCH 081/139] Fixed path joining in FindFilesWithSuffixInPath (#704) ## Changes Fixes #693 ## Tests Newly added tests failed before the fix: https://github.com/databricks/cli/actions/runs/6000754026/job/16273507998?pr=704 --- python/utils.go | 8 ++++---- python/utils_test.go | 21 +++++++++++++++++++++ 2 files changed, 25 insertions(+), 4 deletions(-) create mode 100644 python/utils_test.go diff --git a/python/utils.go b/python/utils.go index 10654edc0..a8408fae2 100644 --- a/python/utils.go +++ b/python/utils.go @@ -5,7 +5,7 @@ package python import ( "context" "os" - "path" + "path/filepath" "strings" "github.com/databricks/cli/libs/log" @@ -13,8 +13,8 @@ import ( func CleanupWheelFolder(dir string) { // there or not there - we don't care - os.RemoveAll(path.Join(dir, "__pycache__")) - os.RemoveAll(path.Join(dir, "build")) + os.RemoveAll(filepath.Join(dir, "__pycache__")) + os.RemoveAll(filepath.Join(dir, "build")) eggInfo := 
FindFilesWithSuffixInPath(dir, ".egg-info") if len(eggInfo) == 0 { return @@ -42,7 +42,7 @@ func FindFilesWithSuffixInPath(dir, suffix string) []string { if !strings.HasSuffix(child.Name(), suffix) { continue } - files = append(files, path.Join(dir, child.Name())) + files = append(files, filepath.Join(dir, child.Name())) } return files } diff --git a/python/utils_test.go b/python/utils_test.go new file mode 100644 index 000000000..1656d1ecb --- /dev/null +++ b/python/utils_test.go @@ -0,0 +1,21 @@ +package python + +import ( + "os" + "path/filepath" + "testing" + + "github.com/stretchr/testify/require" +) + +func TestFindFilesWithSuffixInPath(t *testing.T) { + dir, err := os.Getwd() + require.NoError(t, err) + + files := FindFilesWithSuffixInPath(dir, "test.go") + + matches, err := filepath.Glob(filepath.Join(dir, "*test.go")) + require.NoError(t, err) + + require.ElementsMatch(t, files, matches) +} From 12368e3382f59cfdba3bbc775423181f458c62cb Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 30 Aug 2023 14:21:39 +0200 Subject: [PATCH 082/139] Added transformation mutator for Python wheel task for them to work on DBR <13.1 (#635) ## Changes ***Note: this PR relies on sync.include functionality from here: https://github.com/databricks/cli/pull/671*** Added transformation mutator for Python wheel task for them to work on DBR <13.1 Using wheels upload to Workspace file system as cluster libraries is not supported in DBR < 13.1 In order to make Python wheel work correctly on DBR < 13.1 we do the following: 1. Build and upload python wheel as usual 2. Transform python wheel task into special notebook task which does the following a. Installs all necessary wheels with %pip magic b. Executes defined entry point with all provided parameters 3. Upload this notebook file to workspace file system 4. 
Deploy transformed job task This is also beneficial for executing on existing clusters because this notebook always reinstall wheels so if there are any changes to the wheel package, they are correctly picked up ## Tests bundle.yml ```yaml bundle: name: wheel-task workspace: host: **** resources: jobs: test_job: name: "[${bundle.environment}] My Wheel Job" tasks: - task_key: TestTask existing_cluster_id: "***" python_wheel_task: package_name: "my_test_code" entry_point: "run" parameters: ["first argument","first value","second argument","second value"] libraries: - whl: ./dist/*.whl ``` Output ``` andrew.nester@HFW9Y94129 wheel % databricks bundle run test_job Run URL: *** 2023-08-03 15:58:04 "[default] My Wheel Job" TERMINATED SUCCESS Output: ======= Task TestTask: Hello from my func Got arguments v1: ['python', 'first argument', 'first value', 'second argument', 'second value'] ``` --- bundle/config/mutator/trampoline.go | 100 +++++++++++++++++++++ bundle/config/mutator/trampoline_test.go | 97 ++++++++++++++++++++ bundle/phases/deploy.go | 4 +- bundle/python/transform.go | 109 +++++++++++++++++++++++ bundle/python/transform_test.go | 66 ++++++++++++++ 5 files changed, 375 insertions(+), 1 deletion(-) create mode 100644 bundle/config/mutator/trampoline.go create mode 100644 bundle/config/mutator/trampoline_test.go create mode 100644 bundle/python/transform.go create mode 100644 bundle/python/transform_test.go diff --git a/bundle/config/mutator/trampoline.go b/bundle/config/mutator/trampoline.go new file mode 100644 index 000000000..7c06c7fa6 --- /dev/null +++ b/bundle/config/mutator/trampoline.go @@ -0,0 +1,100 @@ +package mutator + +import ( + "context" + "fmt" + "os" + "path" + "path/filepath" + "text/template" + + "github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +type TaskWithJobKey struct { + Task *jobs.Task + JobKey string +} + +type TrampolineFunctions interface { + GetTemplateData(task *jobs.Task) 
(map[string]any, error) + GetTasks(b *bundle.Bundle) []TaskWithJobKey + CleanUp(task *jobs.Task) error +} +type trampoline struct { + name string + functions TrampolineFunctions + template string +} + +func NewTrampoline( + name string, + functions TrampolineFunctions, + template string, +) *trampoline { + return &trampoline{name, functions, template} +} + +func (m *trampoline) Name() string { + return fmt.Sprintf("trampoline(%s)", m.name) +} + +func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { + tasks := m.functions.GetTasks(b) + for _, task := range tasks { + err := m.generateNotebookWrapper(b, task) + if err != nil { + return err + } + } + return nil +} + +func (m *trampoline) generateNotebookWrapper(b *bundle.Bundle, task TaskWithJobKey) error { + internalDir, err := b.InternalDir() + if err != nil { + return err + } + + notebookName := fmt.Sprintf("notebook_%s_%s", task.JobKey, task.Task.TaskKey) + localNotebookPath := filepath.Join(internalDir, notebookName+".py") + + err = os.MkdirAll(filepath.Dir(localNotebookPath), 0755) + if err != nil { + return err + } + + f, err := os.Create(localNotebookPath) + if err != nil { + return err + } + defer f.Close() + + data, err := m.functions.GetTemplateData(task.Task) + if err != nil { + return err + } + + t, err := template.New(notebookName).Parse(m.template) + if err != nil { + return err + } + + internalDirRel, err := filepath.Rel(b.Config.Path, internalDir) + if err != nil { + return err + } + + err = m.functions.CleanUp(task.Task) + if err != nil { + return err + } + remotePath := path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(internalDirRel), notebookName) + + task.Task.NotebookTask = &jobs.NotebookTask{ + NotebookPath: remotePath, + } + + return t.Execute(f, data) +} diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go new file mode 100644 index 000000000..e523250e0 --- /dev/null +++ b/bundle/config/mutator/trampoline_test.go @@ -0,0 
+1,97 @@ +package mutator + +import ( + "context" + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/require" +) + +type functions struct{} + +func (f *functions) GetTasks(b *bundle.Bundle) []TaskWithJobKey { + tasks := make([]TaskWithJobKey, 0) + for k := range b.Config.Resources.Jobs["test"].Tasks { + tasks = append(tasks, TaskWithJobKey{ + JobKey: "test", + Task: &b.Config.Resources.Jobs["test"].Tasks[k], + }) + } + + return tasks +} + +func (f *functions) GetTemplateData(task *jobs.Task) (map[string]any, error) { + if task.PythonWheelTask == nil { + return nil, fmt.Errorf("PythonWheelTask cannot be nil") + } + + data := make(map[string]any) + data["MyName"] = "Trampoline" + return data, nil +} + +func (f *functions) CleanUp(task *jobs.Task) error { + task.PythonWheelTask = nil + return nil +} + +func TestGenerateTrampoline(t *testing.T) { + tmpDir := t.TempDir() + + tasks := []jobs.Task{ + { + TaskKey: "to_trampoline", + PythonWheelTask: &jobs.PythonWheelTask{ + PackageName: "test", + EntryPoint: "run", + }}, + } + + b := &bundle.Bundle{ + Config: config.Root{ + Path: tmpDir, + Bundle: config.Bundle{ + Target: "development", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "test": { + Paths: resources.Paths{ + ConfigFilePath: tmpDir, + }, + JobSettings: &jobs.JobSettings{ + Tasks: tasks, + }, + }, + }, + }, + }, + } + ctx := context.Background() + + funcs := functions{} + trampoline := NewTrampoline("test_trampoline", &funcs, "Hello from {{.MyName}}") + err := bundle.Apply(ctx, b, trampoline) + require.NoError(t, err) + + dir, err := b.InternalDir() + require.NoError(t, err) + filename := filepath.Join(dir, "notebook_test_to_trampoline.py") + + bytes, err := os.ReadFile(filename) + require.NoError(t, err) + + 
require.Equal(t, "Hello from Trampoline", string(bytes)) + + task := b.Config.Resources.Jobs["test"].Tasks[0] + require.Equal(t, task.NotebookTask.NotebookPath, ".databricks/bundle/development/.internal/notebook_test_to_trampoline") + require.Nil(t, task.PythonWheelTask) +} diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 011bb4b2b..5a9a7f2fe 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -8,6 +8,7 @@ import ( "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/libraries" + "github.com/databricks/cli/bundle/python" ) // The deploy phase deploys artifacts and resources. @@ -17,10 +18,11 @@ func Deploy() bundle.Mutator { bundle.Defer( bundle.Seq( mutator.ValidateGitDetails(), - files.Upload(), libraries.MatchWithArtifacts(), artifacts.CleanUp(), artifacts.UploadAll(), + python.TransformWheelTask(), + files.Upload(), terraform.Interpolate(), terraform.Write(), terraform.StatePull(), diff --git a/bundle/python/transform.go b/bundle/python/transform.go new file mode 100644 index 000000000..69bb5766f --- /dev/null +++ b/bundle/python/transform.go @@ -0,0 +1,109 @@ +package python + +import ( + "fmt" + "strconv" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +const NOTEBOOK_TEMPLATE = `# Databricks notebook source +%python +{{range .Libraries}} +%pip install --force-reinstall {{.Whl}} +{{end}} + +try: + from importlib import metadata +except ImportError: # for Python<3.8 + import subprocess + import sys + + subprocess.check_call([sys.executable, "-m", "pip", "install", "importlib-metadata"]) + import importlib_metadata as metadata + +from contextlib import redirect_stdout +import io +import sys +sys.argv = [{{.Params}}] + +entry = [ep for ep in metadata.distribution("{{.Task.PackageName}}").entry_points if ep.name == 
"{{.Task.EntryPoint}}"] + +f = io.StringIO() +with redirect_stdout(f): + if entry: + entry[0].load()() + else: + raise ImportError("Entry point '{{.Task.EntryPoint}}' not found") +s = f.getvalue() +dbutils.notebook.exit(s) +` + +// This mutator takes the wheel task and transforms it into notebook +// which installs uploaded wheels using %pip and then calling corresponding +// entry point. +func TransformWheelTask() bundle.Mutator { + return mutator.NewTrampoline( + "python_wheel", + &pythonTrampoline{}, + NOTEBOOK_TEMPLATE, + ) +} + +type pythonTrampoline struct{} + +func (t *pythonTrampoline) CleanUp(task *jobs.Task) error { + task.PythonWheelTask = nil + task.Libraries = nil + + return nil +} + +func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { + r := b.Config.Resources + result := make([]mutator.TaskWithJobKey, 0) + for k := range b.Config.Resources.Jobs { + tasks := r.Jobs[k].JobSettings.Tasks + for i := range tasks { + task := &tasks[i] + result = append(result, mutator.TaskWithJobKey{ + JobKey: k, + Task: task, + }) + } + } + return result +} + +func (t *pythonTrampoline) GetTemplateData(task *jobs.Task) (map[string]any, error) { + params, err := t.generateParameters(task.PythonWheelTask) + if err != nil { + return nil, err + } + + data := map[string]any{ + "Libraries": task.Libraries, + "Params": params, + "Task": task.PythonWheelTask, + } + + return data, nil +} + +func (t *pythonTrampoline) generateParameters(task *jobs.PythonWheelTask) (string, error) { + if task.Parameters != nil && task.NamedParameters != nil { + return "", fmt.Errorf("not allowed to pass both paramaters and named_parameters") + } + params := append([]string{"python"}, task.Parameters...) 
+ for k, v := range task.NamedParameters { + params = append(params, fmt.Sprintf("%s=%s", k, v)) + } + + for i := range params { + params[i] = strconv.Quote(params[i]) + } + return strings.Join(params, ", "), nil +} diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go new file mode 100644 index 000000000..1baebfc8e --- /dev/null +++ b/bundle/python/transform_test.go @@ -0,0 +1,66 @@ +package python + +import ( + "strings" + "testing" + + "github.com/databricks/databricks-sdk-go/service/jobs" + "github.com/stretchr/testify/require" +) + +type testCase struct { + Actual []string + Expected string +} +type NamedParams map[string]string +type testCaseNamed struct { + Actual NamedParams + Expected string +} + +var paramsTestCases []testCase = []testCase{ + {[]string{}, `"python"`}, + {[]string{"a"}, `"python", "a"`}, + {[]string{"a", "b"}, `"python", "a", "b"`}, + {[]string{"123!@#$%^&*()-="}, `"python", "123!@#$%^&*()-="`}, + {[]string{`{"a": 1}`}, `"python", "{\"a\": 1}"`}, +} + +var paramsTestCasesNamed []testCaseNamed = []testCaseNamed{ + {NamedParams{}, `"python"`}, + {NamedParams{"a": "1"}, `"python", "a=1"`}, + {NamedParams{"a": "'1'"}, `"python", "a='1'"`}, + {NamedParams{"a": `"1"`}, `"python", "a=\"1\""`}, + {NamedParams{"a": "1", "b": "2"}, `"python", "a=1", "b=2"`}, + {NamedParams{"data": `{"a": 1}`}, `"python", "data={\"a\": 1}"`}, +} + +func TestGenerateParameters(t *testing.T) { + trampoline := pythonTrampoline{} + for _, c := range paramsTestCases { + task := &jobs.PythonWheelTask{Parameters: c.Actual} + result, err := trampoline.generateParameters(task) + require.NoError(t, err) + require.Equal(t, c.Expected, result) + } +} + +func TestGenerateNamedParameters(t *testing.T) { + trampoline := pythonTrampoline{} + for _, c := range paramsTestCasesNamed { + task := &jobs.PythonWheelTask{NamedParameters: c.Actual} + result, err := trampoline.generateParameters(task) + require.NoError(t, err) + + // parameters order can be 
undetermenistic, so just check that they exist as expected + require.ElementsMatch(t, strings.Split(c.Expected, ","), strings.Split(result, ",")) + } +} + +func TestGenerateBoth(t *testing.T) { + trampoline := pythonTrampoline{} + task := &jobs.PythonWheelTask{NamedParameters: map[string]string{"a": "1"}, Parameters: []string{"b"}} + _, err := trampoline.generateParameters(task) + require.Error(t, err) + require.ErrorContains(t, err, "not allowed to pass both paramaters and named_parameters") +} From ca2f1dc06c8a7324ac38c35c0f35856348cec918 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 30 Aug 2023 15:51:15 +0200 Subject: [PATCH 083/139] Filter down to Python wheel tasks only for trampoline (#712) ## Changes Fixes issue introduced in #635. ## Tests Added new unit test to confirm correct behavior. Manually deployed sample bundle. --- bundle/python/transform.go | 6 ++++ bundle/python/transform_test.go | 51 +++++++++++++++++++++++++++------ 2 files changed, 49 insertions(+), 8 deletions(-) diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 69bb5766f..6ec75a038 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -69,6 +69,12 @@ func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { tasks := r.Jobs[k].JobSettings.Tasks for i := range tasks { task := &tasks[i] + + // Keep only Python wheel tasks + if task.PythonWheelTask == nil { + continue + } + result = append(result, mutator.TaskWithJobKey{ JobKey: k, Task: task, diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index 1baebfc8e..c7b1f36e7 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -4,6 +4,9 @@ import ( "strings" "testing" + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" ) @@ -12,9 +15,9 
@@ type testCase struct { Actual []string Expected string } -type NamedParams map[string]string + type testCaseNamed struct { - Actual NamedParams + Actual map[string]string Expected string } @@ -27,12 +30,12 @@ var paramsTestCases []testCase = []testCase{ } var paramsTestCasesNamed []testCaseNamed = []testCaseNamed{ - {NamedParams{}, `"python"`}, - {NamedParams{"a": "1"}, `"python", "a=1"`}, - {NamedParams{"a": "'1'"}, `"python", "a='1'"`}, - {NamedParams{"a": `"1"`}, `"python", "a=\"1\""`}, - {NamedParams{"a": "1", "b": "2"}, `"python", "a=1", "b=2"`}, - {NamedParams{"data": `{"a": 1}`}, `"python", "data={\"a\": 1}"`}, + {map[string]string{}, `"python"`}, + {map[string]string{"a": "1"}, `"python", "a=1"`}, + {map[string]string{"a": "'1'"}, `"python", "a='1'"`}, + {map[string]string{"a": `"1"`}, `"python", "a=\"1\""`}, + {map[string]string{"a": "1", "b": "2"}, `"python", "a=1", "b=2"`}, + {map[string]string{"data": `{"a": 1}`}, `"python", "data={\"a\": 1}"`}, } func TestGenerateParameters(t *testing.T) { @@ -64,3 +67,35 @@ func TestGenerateBoth(t *testing.T) { require.Error(t, err) require.ErrorContains(t, err, "not allowed to pass both paramaters and named_parameters") } + +func TestTransformFiltersWheelTasksOnly(t *testing.T) { + trampoline := pythonTrampoline{} + bundle := &bundle.Bundle{ + Config: config.Root{ + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "job1": { + JobSettings: &jobs.JobSettings{ + Tasks: []jobs.Task{ + { + TaskKey: "key1", + PythonWheelTask: &jobs.PythonWheelTask{}, + }, + { + TaskKey: "key2", + NotebookTask: &jobs.NotebookTask{}, + }, + }, + }, + }, + }, + }, + }, + } + + tasks := trampoline.GetTasks(bundle) + require.Len(t, tasks, 1) + require.Equal(t, "job1", tasks[0].JobKey) + require.Equal(t, "key1", tasks[0].Task.TaskKey) + require.NotNil(t, tasks[0].Task.PythonWheelTask) +} From aa9e1fc41ce5b3abd99f55590c269149596c3611 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 30 Aug 2023 15:58:28 +0200 Subject: 
[PATCH 084/139] Update Terraform provider schema structs from 1.23.0 (#713) ## Changes The provider at version 1.24.0 includes a regression for the MLflow model resource. To fix this, we explicitly pin the provider version at the version we generate bindings for. ## Tests Confirmed that a deploy of said MLflow model resource works with 1.23.0. --- .../tf/codegen/generator/generator.go | 30 +++++++++++++++++ bundle/internal/tf/codegen/schema/generate.go | 8 +++-- bundle/internal/tf/codegen/schema/version.go | 3 ++ .../tf/codegen/templates/root.go.tmpl | 32 +++++++++++++++++++ .../internal/tf/schema/data_source_cluster.go | 1 + .../tf/schema/data_source_instance_pool.go | 1 + bundle/internal/tf/schema/data_source_job.go | 21 ++++++++++++ bundle/internal/tf/schema/resource_cluster.go | 1 + .../internal/tf/schema/resource_connection.go | 15 +++++++++ .../tf/schema/resource_instance_pool.go | 1 + bundle/internal/tf/schema/resource_job.go | 21 ++++++++++++ .../tf/schema/resource_model_serving.go | 1 + .../internal/tf/schema/resource_pipeline.go | 1 + bundle/internal/tf/schema/root.go | 2 +- 14 files changed, 134 insertions(+), 4 deletions(-) create mode 100644 bundle/internal/tf/codegen/schema/version.go create mode 100644 bundle/internal/tf/codegen/templates/root.go.tmpl create mode 100644 bundle/internal/tf/schema/resource_connection.go diff --git a/bundle/internal/tf/codegen/generator/generator.go b/bundle/internal/tf/codegen/generator/generator.go index 2bd78d96f..86d762439 100644 --- a/bundle/internal/tf/codegen/generator/generator.go +++ b/bundle/internal/tf/codegen/generator/generator.go @@ -8,6 +8,7 @@ import ( "strings" "text/template" + schemapkg "github.com/databricks/cli/bundle/internal/tf/codegen/schema" tfjson "github.com/hashicorp/terraform-json" ) @@ -32,6 +33,23 @@ func (c *collection) Generate(path string) error { return tmpl.Execute(f, c) } +type root struct { + OutputFile string + ProviderVersion string +} + +func (r *root) Generate(path string) error 
{ + tmpl := template.Must(template.ParseFiles(fmt.Sprintf("./templates/%s.tmpl", r.OutputFile))) + f, err := os.Create(filepath.Join(path, r.OutputFile)) + if err != nil { + return err + } + + defer f.Close() + + return tmpl.Execute(f, r) +} + func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error { // Generate types for resources. var resources []*namedBlock @@ -105,5 +123,17 @@ func Run(ctx context.Context, schema *tfjson.ProviderSchema, path string) error } } + // Generate root.go + { + r := &root{ + OutputFile: "root.go", + ProviderVersion: schemapkg.ProviderVersion, + } + err := r.Generate(path) + if err != nil { + return err + } + } + return nil } diff --git a/bundle/internal/tf/codegen/schema/generate.go b/bundle/internal/tf/codegen/schema/generate.go index 4d3e2832b..de2d27225 100644 --- a/bundle/internal/tf/codegen/schema/generate.go +++ b/bundle/internal/tf/codegen/schema/generate.go @@ -8,6 +8,7 @@ import ( "os" "path/filepath" + "github.com/hashicorp/go-version" "github.com/hashicorp/hc-install/product" "github.com/hashicorp/hc-install/releases" "github.com/hashicorp/terraform-exec/tfexec" @@ -19,7 +20,7 @@ func (s *Schema) writeTerraformBlock(_ context.Context) error { "required_providers": map[string]interface{}{ "databricks": map[string]interface{}{ "source": "databricks/databricks", - "version": ">= 1.0.0", + "version": ProviderVersion, }, }, }, @@ -40,9 +41,10 @@ func (s *Schema) installTerraform(ctx context.Context) (path string, err error) return } - installer := &releases.LatestVersion{ - InstallDir: installDir, + installer := &releases.ExactVersion{ Product: product.Terraform, + Version: version.Must(version.NewVersion("1.5.5")), + InstallDir: installDir, } installer.SetLogger(log.Default()) diff --git a/bundle/internal/tf/codegen/schema/version.go b/bundle/internal/tf/codegen/schema/version.go new file mode 100644 index 000000000..84456731f --- /dev/null +++ b/bundle/internal/tf/codegen/schema/version.go @@ -0,0 +1,3 
@@ +package schema + +const ProviderVersion = "1.23.0" diff --git a/bundle/internal/tf/codegen/templates/root.go.tmpl b/bundle/internal/tf/codegen/templates/root.go.tmpl new file mode 100644 index 000000000..3beb30072 --- /dev/null +++ b/bundle/internal/tf/codegen/templates/root.go.tmpl @@ -0,0 +1,32 @@ +package schema + +type Providers struct { + Databricks *Config `json:"databricks,omitempty"` +} + +func NewProviders() *Providers { + return &Providers{ + Databricks: &Config{}, + } +} + +type Root struct { + Terraform map[string]any `json:"terraform"` + + Provider *Providers `json:"provider,omitempty"` + Data *DataSources `json:"data,omitempty"` + Resource *Resources `json:"resource,omitempty"` +} + +func NewRoot() *Root { + return &Root{ + Terraform: map[string]interface{}{ + "required_providers": map[string]interface{}{ + "databricks": map[string]interface{}{ + "source": "databricks/databricks", + "version": "1.23.0", + }, + }, + }, + } +} diff --git a/bundle/internal/tf/schema/data_source_cluster.go b/bundle/internal/tf/schema/data_source_cluster.go index b5017402b..2aa6fb5d2 100644 --- a/bundle/internal/tf/schema/data_source_cluster.go +++ b/bundle/internal/tf/schema/data_source_cluster.go @@ -90,6 +90,7 @@ type DataSourceClusterClusterInfoGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/data_source_instance_pool.go b/bundle/internal/tf/schema/data_source_instance_pool.go index 498247174..240083d64 100644 --- a/bundle/internal/tf/schema/data_source_instance_pool.go +++ b/bundle/internal/tf/schema/data_source_instance_pool.go @@ -26,6 +26,7 @@ type DataSourceInstancePoolPoolInfoDiskSpec struct { type 
DataSourceInstancePoolPoolInfoGcpAttributes struct { GcpAvailability string `json:"gcp_availability,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` } type DataSourceInstancePoolPoolInfoInstancePoolFleetAttributesFleetOnDemandOption struct { diff --git a/bundle/internal/tf/schema/data_source_job.go b/bundle/internal/tf/schema/data_source_job.go index 6d2d1aa9b..d251dfe5e 100644 --- a/bundle/internal/tf/schema/data_source_job.go +++ b/bundle/internal/tf/schema/data_source_job.go @@ -124,6 +124,7 @@ type DataSourceJobJobSettingsSettingsJobClusterNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -305,6 +306,7 @@ type DataSourceJobJobSettingsSettingsNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -401,6 +403,11 @@ type DataSourceJobJobSettingsSettingsNotificationSettings struct { NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` } +type DataSourceJobJobSettingsSettingsParameter struct { + Default string `json:"default,omitempty"` + Name string `json:"name,omitempty"` +} + type DataSourceJobJobSettingsSettingsPipelineTask struct { FullRefresh bool `json:"full_refresh,omitempty"` PipelineId string `json:"pipeline_id"` @@ -421,6 +428,11 @@ type DataSourceJobJobSettingsSettingsRunAs struct { UserName string `json:"user_name,omitempty"` } +type 
DataSourceJobJobSettingsSettingsRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type DataSourceJobJobSettingsSettingsSchedule struct { PauseStatus string `json:"pause_status,omitempty"` QuartzCronExpression string `json:"quartz_cron_expression"` @@ -573,6 +585,7 @@ type DataSourceJobJobSettingsSettingsTaskNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -682,6 +695,11 @@ type DataSourceJobJobSettingsSettingsTaskPythonWheelTask struct { Parameters []string `json:"parameters,omitempty"` } +type DataSourceJobJobSettingsSettingsTaskRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type DataSourceJobJobSettingsSettingsTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` @@ -760,6 +778,7 @@ type DataSourceJobJobSettingsSettingsTask struct { NotificationSettings *DataSourceJobJobSettingsSettingsTaskNotificationSettings `json:"notification_settings,omitempty"` PipelineTask *DataSourceJobJobSettingsSettingsTaskPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *DataSourceJobJobSettingsSettingsTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask *DataSourceJobJobSettingsSettingsTaskRunJobTask `json:"run_job_task,omitempty"` SparkJarTask *DataSourceJobJobSettingsSettingsTaskSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *DataSourceJobJobSettingsSettingsTaskSparkPythonTask `json:"spark_python_task,omitempty"` SparkSubmitTask *DataSourceJobJobSettingsSettingsTaskSparkSubmitTask 
`json:"spark_submit_task,omitempty"` @@ -821,10 +840,12 @@ type DataSourceJobJobSettingsSettings struct { NewCluster *DataSourceJobJobSettingsSettingsNewCluster `json:"new_cluster,omitempty"` NotebookTask *DataSourceJobJobSettingsSettingsNotebookTask `json:"notebook_task,omitempty"` NotificationSettings *DataSourceJobJobSettingsSettingsNotificationSettings `json:"notification_settings,omitempty"` + Parameter []DataSourceJobJobSettingsSettingsParameter `json:"parameter,omitempty"` PipelineTask *DataSourceJobJobSettingsSettingsPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *DataSourceJobJobSettingsSettingsPythonWheelTask `json:"python_wheel_task,omitempty"` Queue *DataSourceJobJobSettingsSettingsQueue `json:"queue,omitempty"` RunAs *DataSourceJobJobSettingsSettingsRunAs `json:"run_as,omitempty"` + RunJobTask *DataSourceJobJobSettingsSettingsRunJobTask `json:"run_job_task,omitempty"` Schedule *DataSourceJobJobSettingsSettingsSchedule `json:"schedule,omitempty"` SparkJarTask *DataSourceJobJobSettingsSettingsSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *DataSourceJobJobSettingsSettingsSparkPythonTask `json:"spark_python_task,omitempty"` diff --git a/bundle/internal/tf/schema/resource_cluster.go b/bundle/internal/tf/schema/resource_cluster.go index a95b8c13a..bb4e35824 100644 --- a/bundle/internal/tf/schema/resource_cluster.go +++ b/bundle/internal/tf/schema/resource_cluster.go @@ -68,6 +68,7 @@ type ResourceClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/resource_connection.go b/bundle/internal/tf/schema/resource_connection.go new file mode 100644 index 000000000..a249a5393 --- 
/dev/null +++ b/bundle/internal/tf/schema/resource_connection.go @@ -0,0 +1,15 @@ +// Generated from Databricks Terraform provider schema. DO NOT EDIT. + +package schema + +type ResourceConnection struct { + Comment string `json:"comment,omitempty"` + ConnectionType string `json:"connection_type"` + Id string `json:"id,omitempty"` + MetastoreId string `json:"metastore_id,omitempty"` + Name string `json:"name"` + Options map[string]string `json:"options"` + Owner string `json:"owner,omitempty"` + Properties map[string]string `json:"properties,omitempty"` + ReadOnly bool `json:"read_only,omitempty"` +} diff --git a/bundle/internal/tf/schema/resource_instance_pool.go b/bundle/internal/tf/schema/resource_instance_pool.go index 2c3221217..f524b3fce 100644 --- a/bundle/internal/tf/schema/resource_instance_pool.go +++ b/bundle/internal/tf/schema/resource_instance_pool.go @@ -26,6 +26,7 @@ type ResourceInstancePoolDiskSpec struct { type ResourceInstancePoolGcpAttributes struct { GcpAvailability string `json:"gcp_availability,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` } type ResourceInstancePoolInstancePoolFleetAttributesFleetOnDemandOption struct { diff --git a/bundle/internal/tf/schema/resource_job.go b/bundle/internal/tf/schema/resource_job.go index 77b681ee5..50101400a 100644 --- a/bundle/internal/tf/schema/resource_job.go +++ b/bundle/internal/tf/schema/resource_job.go @@ -124,6 +124,7 @@ type ResourceJobJobClusterNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -305,6 +306,7 @@ type ResourceJobNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int 
`json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -401,6 +403,11 @@ type ResourceJobNotificationSettings struct { NoAlertForSkippedRuns bool `json:"no_alert_for_skipped_runs,omitempty"` } +type ResourceJobParameter struct { + Default string `json:"default,omitempty"` + Name string `json:"name,omitempty"` +} + type ResourceJobPipelineTask struct { FullRefresh bool `json:"full_refresh,omitempty"` PipelineId string `json:"pipeline_id"` @@ -421,6 +428,11 @@ type ResourceJobRunAs struct { UserName string `json:"user_name,omitempty"` } +type ResourceJobRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type ResourceJobSchedule struct { PauseStatus string `json:"pause_status,omitempty"` QuartzCronExpression string `json:"quartz_cron_expression"` @@ -573,6 +585,7 @@ type ResourceJobTaskNewClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` BootDiskSize int `json:"boot_disk_size,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` UsePreemptibleExecutors bool `json:"use_preemptible_executors,omitempty"` ZoneId string `json:"zone_id,omitempty"` } @@ -682,6 +695,11 @@ type ResourceJobTaskPythonWheelTask struct { Parameters []string `json:"parameters,omitempty"` } +type ResourceJobTaskRunJobTask struct { + JobId string `json:"job_id"` + JobParameters map[string]string `json:"job_parameters,omitempty"` +} + type ResourceJobTaskSparkJarTask struct { JarUri string `json:"jar_uri,omitempty"` MainClassName string `json:"main_class_name,omitempty"` @@ -760,6 +778,7 @@ type ResourceJobTask struct { NotificationSettings *ResourceJobTaskNotificationSettings 
`json:"notification_settings,omitempty"` PipelineTask *ResourceJobTaskPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *ResourceJobTaskPythonWheelTask `json:"python_wheel_task,omitempty"` + RunJobTask *ResourceJobTaskRunJobTask `json:"run_job_task,omitempty"` SparkJarTask *ResourceJobTaskSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *ResourceJobTaskSparkPythonTask `json:"spark_python_task,omitempty"` SparkSubmitTask *ResourceJobTaskSparkSubmitTask `json:"spark_submit_task,omitempty"` @@ -825,10 +844,12 @@ type ResourceJob struct { NewCluster *ResourceJobNewCluster `json:"new_cluster,omitempty"` NotebookTask *ResourceJobNotebookTask `json:"notebook_task,omitempty"` NotificationSettings *ResourceJobNotificationSettings `json:"notification_settings,omitempty"` + Parameter []ResourceJobParameter `json:"parameter,omitempty"` PipelineTask *ResourceJobPipelineTask `json:"pipeline_task,omitempty"` PythonWheelTask *ResourceJobPythonWheelTask `json:"python_wheel_task,omitempty"` Queue *ResourceJobQueue `json:"queue,omitempty"` RunAs *ResourceJobRunAs `json:"run_as,omitempty"` + RunJobTask *ResourceJobRunJobTask `json:"run_job_task,omitempty"` Schedule *ResourceJobSchedule `json:"schedule,omitempty"` SparkJarTask *ResourceJobSparkJarTask `json:"spark_jar_task,omitempty"` SparkPythonTask *ResourceJobSparkPythonTask `json:"spark_python_task,omitempty"` diff --git a/bundle/internal/tf/schema/resource_model_serving.go b/bundle/internal/tf/schema/resource_model_serving.go index b7ff88ccd..cc5c32570 100644 --- a/bundle/internal/tf/schema/resource_model_serving.go +++ b/bundle/internal/tf/schema/resource_model_serving.go @@ -4,6 +4,7 @@ package schema type ResourceModelServingConfigServedModels struct { EnvironmentVars map[string]string `json:"environment_vars,omitempty"` + InstanceProfileArn string `json:"instance_profile_arn,omitempty"` ModelName string `json:"model_name"` ModelVersion string `json:"model_version"` Name string `json:"name,omitempty"` 
diff --git a/bundle/internal/tf/schema/resource_pipeline.go b/bundle/internal/tf/schema/resource_pipeline.go index 9e7f71b11..5c5de9a7e 100644 --- a/bundle/internal/tf/schema/resource_pipeline.go +++ b/bundle/internal/tf/schema/resource_pipeline.go @@ -47,6 +47,7 @@ type ResourcePipelineClusterClusterLogConf struct { type ResourcePipelineClusterGcpAttributes struct { Availability string `json:"availability,omitempty"` GoogleServiceAccount string `json:"google_service_account,omitempty"` + LocalSsdCount int `json:"local_ssd_count,omitempty"` ZoneId string `json:"zone_id,omitempty"` } diff --git a/bundle/internal/tf/schema/root.go b/bundle/internal/tf/schema/root.go index 9cfe8491d..3beb30072 100644 --- a/bundle/internal/tf/schema/root.go +++ b/bundle/internal/tf/schema/root.go @@ -24,7 +24,7 @@ func NewRoot() *Root { "required_providers": map[string]interface{}{ "databricks": map[string]interface{}{ "source": "databricks/databricks", - "version": ">= 1.0.0", + "version": "1.23.0", }, }, }, From 707fd6f617a7c7837d493ab9349a591a45f1cdd7 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Wed, 30 Aug 2023 07:01:08 -0700 Subject: [PATCH 085/139] Cleanup after "Add a foundation for built-in templates" (#707) ## Changes Add some cleanup based on @pietern's comments on https://github.com/databricks/cli/pull/685 --- bundle/config/mutator/process_target_mode.go | 5 ++++- cmd/root/bundle.go | 4 ++-- libs/auth/service_principal.go | 8 ++++++-- libs/template/helpers.go | 5 ++++- libs/template/materialize.go | 4 +++- 5 files changed, 19 insertions(+), 7 deletions(-) diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index 3a00d42fc..be93512bb 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -160,7 +160,10 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { } return transformDevelopmentMode(b) case config.Production: - 
isPrincipal := auth.IsServicePrincipal(ctx, b.WorkspaceClient(), b.Config.Workspace.CurrentUser.Id) + isPrincipal, err := auth.IsServicePrincipal(ctx, b.WorkspaceClient(), b.Config.Workspace.CurrentUser.Id) + if err != nil { + return err + } return validateProductionMode(ctx, b, isPrincipal) case "": // No action diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index fe97fbf22..10cce67a4 100644 --- a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -43,7 +43,7 @@ func getTarget(cmd *cobra.Command) (value string) { return target } -func GetProfile(cmd *cobra.Command) (value string) { +func getProfile(cmd *cobra.Command) (value string) { // The command line flag takes precedence. flag := cmd.Flag("profile") if flag != nil { @@ -70,7 +70,7 @@ func loadBundle(cmd *cobra.Command, args []string, load func(ctx context.Context return nil, nil } - profile := GetProfile(cmd) + profile := getProfile(cmd) if profile != "" { b.Config.Workspace.Profile = profile } diff --git a/libs/auth/service_principal.go b/libs/auth/service_principal.go index 58fcc6a7a..a6740b503 100644 --- a/libs/auth/service_principal.go +++ b/libs/auth/service_principal.go @@ -4,13 +4,17 @@ import ( "context" "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/apierr" ) // Determines whether a given user id is a service principal. // This function uses a heuristic: if no user exists with this id, we assume // it's a service principal. Unfortunately, the standard service principal API is too // slow for our purposes. 
-func IsServicePrincipal(ctx context.Context, ws *databricks.WorkspaceClient, userId string) bool { +func IsServicePrincipal(ctx context.Context, ws *databricks.WorkspaceClient, userId string) (bool, error) { _, err := ws.Users.GetById(ctx, userId) - return err != nil + if apierr.IsMissing(err) { + return true, nil + } + return false, err } diff --git a/libs/template/helpers.go b/libs/template/helpers.go index b8f2fe456..f947d9ba8 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -104,7 +104,10 @@ func loadHelpers(ctx context.Context) template.FuncMap { return false, err } } - result := auth.IsServicePrincipal(ctx, w, user.Id) + result, err := auth.IsServicePrincipal(ctx, w, user.Id) + if err != nil { + return false, err + } is_service_principal = &result return result, nil }, diff --git a/libs/template/materialize.go b/libs/template/materialize.go index 5422160df..8517858fd 100644 --- a/libs/template/materialize.go +++ b/libs/template/materialize.go @@ -7,6 +7,8 @@ import ( "os" "path" "path/filepath" + + "github.com/databricks/cli/libs/cmdio" ) const libraryDirName = "library" @@ -80,7 +82,7 @@ func Materialize(ctx context.Context, configFilePath, templateRoot, outputDir st if err != nil { return err } - println("✨ Successfully initialized template") + cmdio.LogString(ctx, "✨ Successfully initialized template") return nil } From 46b999ed426ac122a0b915ad8a49bd7eec809493 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 30 Aug 2023 16:08:37 +0200 Subject: [PATCH 086/139] Pin Terraform binary version to 1.5.5 (#715) ## Changes The installer doesn't respect the version constraints if they are specified. Source: [the vc argument is not used](https://github.com/hashicorp/hc-install/blob/850464c6016513fc7ad47114d010080ec16f32cb/releases/latest_version.go#L158-L177). ## Tests Confirmed manually. 
--- bundle/deploy/terraform/init.go | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 924c1f090..6df7b8d48 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -55,10 +55,10 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con } // Download Terraform to private bin directory. - installer := &releases.LatestVersion{ - Product: product.Terraform, - Constraints: version.MustConstraints(version.NewConstraint("<=1.5.5")), - InstallDir: binDir, + installer := &releases.ExactVersion{ + Product: product.Terraform, + Version: version.Must(version.NewVersion("1.5.5")), + InstallDir: binDir, } execPath, err = installer.Install(ctx) if err != nil { From a548eba492883866e49157d24fd252f82f0029c0 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 30 Aug 2023 16:09:15 +0200 Subject: [PATCH 087/139] Test transform when no Python wheel tasks defined (#714) ## Changes Fixed panic from Python transform when no python wheel tasks defined ## Tests Added regression test --- bundle/python/transform_test.go | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index c7b1f36e7..fb2c23e42 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -1,6 +1,7 @@ package python import ( + "context" "strings" "testing" @@ -99,3 +100,34 @@ func TestTransformFiltersWheelTasksOnly(t *testing.T) { require.Equal(t, "key1", tasks[0].Task.TaskKey) require.NotNil(t, tasks[0].Task.PythonWheelTask) } + +func TestNoPanicWithNoPythonWheelTasks(t *testing.T) { + tmpDir := t.TempDir() + b := &bundle.Bundle{ + Config: config.Root{ + Path: tmpDir, + Bundle: config.Bundle{ + Target: "development", + }, + Resources: config.Resources{ + Jobs: map[string]*resources.Job{ + "test": { + Paths: resources.Paths{ + ConfigFilePath: tmpDir, 
+ }, + JobSettings: &jobs.JobSettings{ + Tasks: []jobs.Task{ + { + TaskKey: "notebook_task", + NotebookTask: &jobs.NotebookTask{}}, + }, + }, + }, + }, + }, + }, + } + trampoline := TransformWheelTask() + err := bundle.Apply(context.Background(), b, trampoline) + require.NoError(t, err) +} From deebaa89f7f4448963878e27e811e0908dda2ad7 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 30 Aug 2023 16:31:36 +0200 Subject: [PATCH 088/139] Release v0.203.3 (#716) Bundles: * Support cluster overrides with cluster_key and compute_key ([#696](https://github.com/databricks/cli/pull/696)). * Allow referencing local Python wheels without artifacts section defined ([#703](https://github.com/databricks/cli/pull/703)). * Fixed --environment flag ([#705](https://github.com/databricks/cli/pull/705)). * Correctly identify local paths in libraries section ([#702](https://github.com/databricks/cli/pull/702)). * Fixed path joining in FindFilesWithSuffixInPath ([#704](https://github.com/databricks/cli/pull/704)). * Added transformation mutator for Python wheel task for them to work on DBR <13.1 ([#635](https://github.com/databricks/cli/pull/635)). Internal: * Add a foundation for built-in templates ([#685](https://github.com/databricks/cli/pull/685)). * Test transform when no Python wheel tasks defined ([#714](https://github.com/databricks/cli/pull/714)). * Pin Terraform binary version to 1.5.5 ([#715](https://github.com/databricks/cli/pull/715)). * Cleanup after "Add a foundation for built-in templates" ([#707](https://github.com/databricks/cli/pull/707)). * Filter down to Python wheel tasks only for trampoline ([#712](https://github.com/databricks/cli/pull/712)). * Update Terraform provider schema structs from 1.23.0 ([#713](https://github.com/databricks/cli/pull/713)). 
--- CHANGELOG.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa0dec134..6fcbab8ce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Version changelog +## 0.203.3 + +Bundles: + * Support cluster overrides with cluster_key and compute_key ([#696](https://github.com/databricks/cli/pull/696)). + * Allow referencing local Python wheels without artifacts section defined ([#703](https://github.com/databricks/cli/pull/703)). + * Fixed --environment flag ([#705](https://github.com/databricks/cli/pull/705)). + * Correctly identify local paths in libraries section ([#702](https://github.com/databricks/cli/pull/702)). + * Fixed path joining in FindFilesWithSuffixInPath ([#704](https://github.com/databricks/cli/pull/704)). + * Added transformation mutator for Python wheel task for them to work on DBR <13.1 ([#635](https://github.com/databricks/cli/pull/635)). + +Internal: + * Add a foundation for built-in templates ([#685](https://github.com/databricks/cli/pull/685)). + * Test transform when no Python wheel tasks defined ([#714](https://github.com/databricks/cli/pull/714)). + * Pin Terraform binary version to 1.5.5 ([#715](https://github.com/databricks/cli/pull/715)). + * Cleanup after "Add a foundation for built-in templates" ([#707](https://github.com/databricks/cli/pull/707)). + * Filter down to Python wheel tasks only for trampoline ([#712](https://github.com/databricks/cli/pull/712)). + * Update Terraform provider schema structs from 1.23.0 ([#713](https://github.com/databricks/cli/pull/713)). + ## 0.203.2 CLI: From cc1038fbd575c1147459a10d98cbf7b6bfc2c746 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 30 Aug 2023 16:57:34 +0200 Subject: [PATCH 089/139] Upgrade to actions/setup-go@v4 (#718) ## Changes Version 4 enables caching by default so we no longer need to explicitly enable it: https://github.com/actions/setup-go#v4. 
The build cache only reuses a cache from a repo's default branch, which for this repository is `main`. After enabling the merge queue, we no longer run builds on the `main` branch after push, but on merge queue branches. With no more builds on the `main` branch there is no longer a cache to reuse. This change fixes that by making the `release(-snapshot)?` workflows use the same caching mechanism. These run off of the `main` branch, so the cache they save can be reused by builds triggered on PRs or from the merge queue. ## Tests We have to merge this to see if it works. --- .github/workflows/push.yml | 7 +++---- .github/workflows/release-snapshot.yml | 19 +------------------ .github/workflows/release.yml | 19 +------------------ 3 files changed, 5 insertions(+), 40 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 6f14fe881..3209ae93d 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -26,10 +26,9 @@ jobs: run: git fetch --prune --unshallow - name: Setup Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: 1.21.0 - cache: true - name: Set go env run: | @@ -54,9 +53,9 @@ jobs: uses: actions/checkout@v3 - name: Setup Go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: - go-version: 1.21 + go-version: 1.21.0 # No need to download cached dependencies when running gofmt. 
cache: false diff --git a/.github/workflows/release-snapshot.yml b/.github/workflows/release-snapshot.yml index 130d49dd0..fbf5421b4 100644 --- a/.github/workflows/release-snapshot.yml +++ b/.github/workflows/release-snapshot.yml @@ -19,27 +19,10 @@ jobs: run: git fetch --prune --unshallow - name: Setup Go - id: go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: 1.21.0 - - name: Locate cache paths - id: cache - run: | - echo "GOMODCACHE=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT - echo "GOCACHE=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - # Note: use custom caching because below performs a cross platform build - # through goreleaser and don't want to share a cache with the test builds. - - name: Setup caching - uses: actions/cache@v3 - with: - path: | - ${{ steps.cache.outputs.GOMODCACHE }} - ${{ steps.cache.outputs.GOCACHE }} - key: release-${{ runner.os }}-go-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', '.goreleaser.yaml') }} - - name: Hide snapshot tag to outsmart GoReleaser run: git tag -d snapshot || true diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5992dcb46..c166fc5b6 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,27 +18,10 @@ jobs: run: git fetch --prune --unshallow - name: Setup Go - id: go - uses: actions/setup-go@v3 + uses: actions/setup-go@v4 with: go-version: 1.21.0 - - name: Locate cache paths - id: cache - run: | - echo "GOMODCACHE=$(go env GOMODCACHE)" >> $GITHUB_OUTPUT - echo "GOCACHE=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - # Note: use custom caching because below performs a cross platform build - # through goreleaser and don't want to share a cache with the test builds. 
- - name: Setup caching - uses: actions/cache@v3 - with: - path: | - ${{ steps.cache.outputs.GOMODCACHE }} - ${{ steps.cache.outputs.GOCACHE }} - key: release-${{ runner.os }}-go-${{ steps.go.outputs.go-version }}-${{ hashFiles('go.sum', '.goreleaser.yaml') }} - - name: Run GoReleaser uses: goreleaser/goreleaser-action@v4 with: From 86c30dd3289751b30a232b415fcc2b4d76232187 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 31 Aug 2023 16:10:32 +0200 Subject: [PATCH 090/139] Fixed artifact file uploading on Windows and wheel execution on DBR 13.3 (#722) ## Changes Fixed artifact file uploading on Windows and wheel execution on DBR 13.3 Fixes #719, #720 ## Tests Added regression test for Windows --- bundle/artifacts/artifacts.go | 5 +- bundle/artifacts/artifacts_test.go | 89 ++++++++++++++++++++++++++++++ bundle/python/transform.go | 2 + 3 files changed, 94 insertions(+), 2 deletions(-) create mode 100644 bundle/artifacts/artifacts_test.go diff --git a/bundle/artifacts/artifacts.go b/bundle/artifacts/artifacts.go index c54131217..0331adb70 100644 --- a/bundle/artifacts/artifacts.go +++ b/bundle/artifacts/artifacts.go @@ -8,6 +8,7 @@ import ( "fmt" "os" "path" + "path/filepath" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts/whl" @@ -107,7 +108,7 @@ func uploadArtifact(ctx context.Context, a *config.Artifact, b *bundle.Bundle) e for i := range a.Files { f := &a.Files[i] if f.NeedsUpload() { - filename := path.Base(f.Source) + filename := filepath.Base(f.Source) cmdio.LogString(ctx, fmt.Sprintf("artifacts.Upload(%s): Uploading...", filename)) remotePath, err := uploadArtifactFile(ctx, f.Source, b) if err != nil { @@ -136,7 +137,7 @@ func uploadArtifactFile(ctx context.Context, file string, b *bundle.Bundle) (str } fileHash := sha256.Sum256(raw) - remotePath := path.Join(uploadPath, fmt.Sprintf("%x", fileHash), path.Base(file)) + remotePath := path.Join(uploadPath, fmt.Sprintf("%x", fileHash), filepath.Base(file)) // Make sure target 
directory exists. err = b.WorkspaceClient().Workspace.MkdirsByPath(ctx, path.Dir(remotePath)) if err != nil { diff --git a/bundle/artifacts/artifacts_test.go b/bundle/artifacts/artifacts_test.go new file mode 100644 index 000000000..65a1950ae --- /dev/null +++ b/bundle/artifacts/artifacts_test.go @@ -0,0 +1,89 @@ +package artifacts + +import ( + "context" + "os" + "path/filepath" + "regexp" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/workspace" + "github.com/stretchr/testify/require" +) + +func touchEmptyFile(t *testing.T, path string) { + err := os.MkdirAll(filepath.Dir(path), 0700) + require.NoError(t, err) + f, err := os.Create(path) + require.NoError(t, err) + f.Close() +} + +type MockWorkspaceService struct { +} + +// Delete implements workspace.WorkspaceService. +func (MockWorkspaceService) Delete(ctx context.Context, request workspace.Delete) error { + panic("unimplemented") +} + +// Export implements workspace.WorkspaceService. +func (MockWorkspaceService) Export(ctx context.Context, request workspace.ExportRequest) (*workspace.ExportResponse, error) { + panic("unimplemented") +} + +// GetStatus implements workspace.WorkspaceService. +func (MockWorkspaceService) GetStatus(ctx context.Context, request workspace.GetStatusRequest) (*workspace.ObjectInfo, error) { + panic("unimplemented") +} + +// Import implements workspace.WorkspaceService. +func (MockWorkspaceService) Import(ctx context.Context, request workspace.Import) error { + return nil +} + +// List implements workspace.WorkspaceService. +func (MockWorkspaceService) List(ctx context.Context, request workspace.ListWorkspaceRequest) (*workspace.ListResponse, error) { + panic("unimplemented") +} + +// Mkdirs implements workspace.WorkspaceService. 
+func (MockWorkspaceService) Mkdirs(ctx context.Context, request workspace.Mkdirs) error { + return nil +} + +func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { + dir := t.TempDir() + whlPath := filepath.Join(dir, "dist", "test.whl") + touchEmptyFile(t, whlPath) + b := &bundle.Bundle{ + Config: config.Root{ + Path: dir, + Bundle: config.Bundle{ + Target: "whatever", + }, + Workspace: config.Workspace{ + ArtifactsPath: "/Users/test@databricks.com/whatever", + }, + }, + } + + b.WorkspaceClient().Workspace.WithImpl(MockWorkspaceService{}) + artifact := &config.Artifact{ + Files: []config.ArtifactFile{ + { + Source: whlPath, + Libraries: []*compute.Library{ + {Whl: "dist\\test.whl"}, + }, + }, + }, + } + + err := uploadArtifact(context.Background(), artifact, b) + require.NoError(t, err) + require.Regexp(t, regexp.MustCompile("/Users/test@databricks.com/whatever/.internal/[a-z0-9]+/test.whl"), artifact.Files[0].RemotePath) +} diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 6ec75a038..53db450b5 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -16,6 +16,8 @@ const NOTEBOOK_TEMPLATE = `# Databricks notebook source %pip install --force-reinstall {{.Whl}} {{end}} +dbutils.library.restartPython() + try: from importlib import metadata except ImportError: # for Python<3.8 From e22fd73b7d23ca96bd733a8552018cb6915e2fd2 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Mon, 4 Sep 2023 00:07:17 -0700 Subject: [PATCH 091/139] Cleanup after previous PR comments (#724) ## Changes @pietern this addresses a comment from you on a recently merged PR. It also updates settings.json based on the settings VS Code adds as soon as you edit a notebook. 
--- .vscode/settings.json | 4 +++- bundle/config/mutator/override_compute_test.go | 9 ++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index 687e0fc02..869465286 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -8,5 +8,7 @@ "files.trimFinalNewlines": true, "python.envFile": "${workspaceFolder}/.databricks/.databricks.env", "databricks.python.envFile": "${workspaceFolder}/.env", - "python.analysis.stubPath": ".vscode" + "python.analysis.stubPath": ".vscode", + "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", + "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------" } diff --git a/bundle/config/mutator/override_compute_test.go b/bundle/config/mutator/override_compute_test.go index f04c91c46..cb37eeb5f 100644 --- a/bundle/config/mutator/override_compute_test.go +++ b/bundle/config/mutator/override_compute_test.go @@ -2,7 +2,6 @@ package mutator_test import ( "context" - "os" "testing" "github.com/databricks/cli/bundle" @@ -16,7 +15,7 @@ import ( ) func TestOverrideDevelopment(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "") + t.Setenv("DATABRICKS_CLUSTER_ID", "") bundle := &bundle.Bundle{ Config: config.Root{ Bundle: config.Bundle{ @@ -62,7 +61,7 @@ func TestOverrideDevelopment(t *testing.T) { } func TestOverrideDevelopmentEnv(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") bundle := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ @@ -90,7 +89,7 @@ func TestOverrideDevelopmentEnv(t *testing.T) { } func TestOverridePipelineTask(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") bundle := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ @@ -144,7 +143,7 @@ func 
TestOverrideProduction(t *testing.T) { } func TestOverrideProductionEnv(t *testing.T) { - os.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") + t.Setenv("DATABRICKS_CLUSTER_ID", "newClusterId") bundle := &bundle.Bundle{ Config: config.Root{ Resources: config.Resources{ From 83443bae8d8ad4df3758f4192c6bbe613faae9c4 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Mon, 4 Sep 2023 11:55:01 +0200 Subject: [PATCH 092/139] Make resource and artifact paths in bundle config relative to config folder (#708) # Warning: breaking change ## Changes Instead of having paths in bundle config files be relative to bundle root even if the config file is nested, this PR makes such paths relative to the folder where the config is located. When bundle is initialised, these paths will be transformed to relative paths based on bundle root. For example, we have file structure like this ``` - mybundle | - bundle.yml | - subfolder | -- resource.yml | -- my.whl ``` Previously, we had to reference `my.whl` in resource.yml like this, which was confusing because resource.yml is in the same subfolder ``` sync: include: - ./subfolder/*.whl ... tasks: - task_key: name libraries: - whl: ./subfolder/my.whl ... ``` After the change we can reference it like this (which is in line with the current behaviour for notebooks) ``` sync: include: - ./*.whl ... tasks: - task_key: name libraries: - whl: ./my.whl ... ``` ## Tests Existing `translate_path_tests` successfully passed after refactoring. Added a couple of uses cases for `Libraries` paths. 
Added a bundle config tests with include config and sync section --------- Co-authored-by: Pieter Noordhuis --- bundle/config/artifact.go | 11 ++ bundle/config/mutator/trampoline_test.go | 3 +- bundle/config/mutator/translate_paths.go | 147 ++++++++---------- .../mutator/translate_paths_artifacts.go | 42 +++++ bundle/config/mutator/translate_paths_jobs.go | 103 ++++++++++++ .../mutator/translate_paths_pipelines.go | 60 +++++++ bundle/config/mutator/translate_paths_test.go | 79 ++++++++-- .../{resources/pkg.go => paths/paths.go} | 2 +- bundle/config/resources/job.go | 3 +- bundle/config/resources/mlflow_experiment.go | 7 +- bundle/config/resources/mlflow_model.go | 7 +- bundle/config/resources/pipeline.go | 7 +- bundle/config/resources_test.go | 19 +-- bundle/config/root.go | 17 +- bundle/config/sync.go | 18 +++ bundle/config/target.go | 2 +- bundle/python/transform_test.go | 3 +- .../relative_path_with_includes/bundle.yml | 25 +++ .../subfolder/include.yml | 20 +++ .../tests/relative_path_with_includes_test.go | 28 ++++ 20 files changed, 482 insertions(+), 121 deletions(-) create mode 100644 bundle/config/mutator/translate_paths_artifacts.go create mode 100644 bundle/config/mutator/translate_paths_jobs.go create mode 100644 bundle/config/mutator/translate_paths_pipelines.go rename bundle/config/{resources/pkg.go => paths/paths.go} (95%) create mode 100644 bundle/tests/relative_path_with_includes/bundle.yml create mode 100644 bundle/tests/relative_path_with_includes/subfolder/include.yml create mode 100644 bundle/tests/relative_path_with_includes_test.go diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index 60331eb13..1955e265d 100644 --- a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -8,9 +8,18 @@ import ( "path" "strings" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/service/compute" ) +type Artifacts map[string]*Artifact + +func (artifacts Artifacts) SetConfigFilePath(path string) { + 
for _, artifact := range artifacts { + artifact.ConfigFilePath = path + } +} + type ArtifactType string const ArtifactPythonWheel ArtifactType = `whl` @@ -34,6 +43,8 @@ type Artifact struct { // (Python wheel, Java jar and etc) itself Files []ArtifactFile `json:"files"` BuildCommand string `json:"build"` + + paths.Paths } func (a *Artifact) Build(ctx context.Context) ([]byte, error) { diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go index e523250e0..aec58618c 100644 --- a/bundle/config/mutator/trampoline_test.go +++ b/bundle/config/mutator/trampoline_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" @@ -64,7 +65,7 @@ func TestGenerateTrampoline(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "test": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: tmpDir, }, JobSettings: &jobs.JobSettings{ diff --git a/bundle/config/mutator/translate_paths.go b/bundle/config/mutator/translate_paths.go index 08f839861..acfd55258 100644 --- a/bundle/config/mutator/translate_paths.go +++ b/bundle/config/mutator/translate_paths.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "net/url" "os" "path" "path/filepath" @@ -11,8 +12,6 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/notebook" - "github.com/databricks/databricks-sdk-go/service/jobs" - "github.com/databricks/databricks-sdk-go/service/pipelines" ) type ErrIsNotebook struct { @@ -44,7 +43,9 @@ func (m *translatePaths) Name() string { return "TranslatePaths" } -// rewritePath converts a given relative path to a stable remote workspace path. 
+type rewriteFunc func(literal, localFullPath, localRelPath, remotePath string) (string, error) + +// rewritePath converts a given relative path from the loaded config to a new path based on the passed rewriting function // // It takes these arguments: // - The argument `dir` is the directory relative to which the given relative path is. @@ -57,13 +58,23 @@ func (m *translatePaths) rewritePath( dir string, b *bundle.Bundle, p *string, - fn func(literal, localPath, remotePath string) (string, error), + fn rewriteFunc, ) error { // We assume absolute paths point to a location in the workspace if path.IsAbs(filepath.ToSlash(*p)) { return nil } + url, err := url.Parse(*p) + if err != nil { + return err + } + + // If the file path has scheme, it's a full path and we don't need to transform it + if url.Scheme != "" { + return nil + } + // Local path is relative to the directory the resource was defined in. localPath := filepath.Join(dir, filepath.FromSlash(*p)) if interp, ok := m.seen[localPath]; ok { @@ -72,19 +83,19 @@ func (m *translatePaths) rewritePath( } // Remote path must be relative to the bundle root. - remotePath, err := filepath.Rel(b.Config.Path, localPath) + localRelPath, err := filepath.Rel(b.Config.Path, localPath) if err != nil { return err } - if strings.HasPrefix(remotePath, "..") { + if strings.HasPrefix(localRelPath, "..") { return fmt.Errorf("path %s is not contained in bundle root path", localPath) } // Prefix remote path with its remote root path. - remotePath = path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(remotePath)) + remotePath := path.Join(b.Config.Workspace.FilesPath, filepath.ToSlash(localRelPath)) // Convert local path into workspace path via specified function. 
- interp, err := fn(*p, localPath, filepath.ToSlash(remotePath)) + interp, err := fn(*p, localPath, localRelPath, filepath.ToSlash(remotePath)) if err != nil { return err } @@ -94,81 +105,69 @@ func (m *translatePaths) rewritePath( return nil } -func (m *translatePaths) translateNotebookPath(literal, localPath, remotePath string) (string, error) { - nb, _, err := notebook.Detect(localPath) +func translateNotebookPath(literal, localFullPath, localRelPath, remotePath string) (string, error) { + nb, _, err := notebook.Detect(localFullPath) if os.IsNotExist(err) { return "", fmt.Errorf("notebook %s not found", literal) } if err != nil { - return "", fmt.Errorf("unable to determine if %s is a notebook: %w", localPath, err) + return "", fmt.Errorf("unable to determine if %s is a notebook: %w", localFullPath, err) } if !nb { - return "", ErrIsNotNotebook{localPath} + return "", ErrIsNotNotebook{localFullPath} } // Upon import, notebooks are stripped of their extension. - return strings.TrimSuffix(remotePath, filepath.Ext(localPath)), nil + return strings.TrimSuffix(remotePath, filepath.Ext(localFullPath)), nil } -func (m *translatePaths) translateFilePath(literal, localPath, remotePath string) (string, error) { - nb, _, err := notebook.Detect(localPath) +func translateFilePath(literal, localFullPath, localRelPath, remotePath string) (string, error) { + nb, _, err := notebook.Detect(localFullPath) if os.IsNotExist(err) { return "", fmt.Errorf("file %s not found", literal) } if err != nil { - return "", fmt.Errorf("unable to determine if %s is not a notebook: %w", localPath, err) + return "", fmt.Errorf("unable to determine if %s is not a notebook: %w", localFullPath, err) } if nb { - return "", ErrIsNotebook{localPath} + return "", ErrIsNotebook{localFullPath} } return remotePath, nil } -func (m *translatePaths) translateJobTask(dir string, b *bundle.Bundle, task *jobs.Task) error { - var err error - - if task.NotebookTask != nil { - err = m.rewritePath(dir, b, 
&task.NotebookTask.NotebookPath, m.translateNotebookPath) - if target := (&ErrIsNotNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a notebook for "tasks.notebook_task.notebook_path" but got a file: %w`, target) - } - if err != nil { - return err - } - } - - if task.SparkPythonTask != nil { - err = m.rewritePath(dir, b, &task.SparkPythonTask.PythonFile, m.translateFilePath) - if target := (&ErrIsNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a file for "tasks.spark_python_task.python_file" but got a notebook: %w`, target) - } - if err != nil { - return err - } - } - - return nil +func translateNoOp(literal, localFullPath, localRelPath, remotePath string) (string, error) { + return localRelPath, nil } -func (m *translatePaths) translatePipelineLibrary(dir string, b *bundle.Bundle, library *pipelines.PipelineLibrary) error { - var err error +type transformer struct { + // A directory path relative to which `path` will be transformed + dir string + // A path to transform + path *string + // Name of the config property where the path string is coming from + configPath string + // A function that performs the actual rewriting logic. 
+ fn rewriteFunc +} - if library.Notebook != nil { - err = m.rewritePath(dir, b, &library.Notebook.Path, m.translateNotebookPath) - if target := (&ErrIsNotNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a notebook for "libraries.notebook.path" but got a file: %w`, target) - } - if err != nil { - return err - } - } +type transformFunc func(resource any, dir string) *transformer - if library.File != nil { - err = m.rewritePath(dir, b, &library.File.Path, m.translateFilePath) - if target := (&ErrIsNotebook{}); errors.As(err, target) { - return fmt.Errorf(`expected a file for "libraries.file.path" but got a notebook: %w`, target) +// Apply all matches transformers for the given resource +func (m *translatePaths) applyTransformers(funcs []transformFunc, b *bundle.Bundle, resource any, dir string) error { + for _, transformFn := range funcs { + transformer := transformFn(resource, dir) + if transformer == nil { + continue } + + err := m.rewritePath(transformer.dir, b, transformer.path, transformer.fn) if err != nil { + if target := (&ErrIsNotebook{}); errors.As(err, target) { + return fmt.Errorf(`expected a file for "%s" but got a notebook: %w`, transformer.configPath, target) + } + if target := (&ErrIsNotNotebook{}); errors.As(err, target) { + return fmt.Errorf(`expected a notebook for "%s" but got a file: %w`, transformer.configPath, target) + } return err } } @@ -179,36 +178,14 @@ func (m *translatePaths) translatePipelineLibrary(dir string, b *bundle.Bundle, func (m *translatePaths) Apply(_ context.Context, b *bundle.Bundle) error { m.seen = make(map[string]string) - for key, job := range b.Config.Resources.Jobs { - dir, err := job.ConfigFileDirectory() + for _, fn := range []func(*translatePaths, *bundle.Bundle) error{ + applyJobTransformers, + applyPipelineTransformers, + applyArtifactTransformers, + } { + err := fn(m, b) if err != nil { - return fmt.Errorf("unable to determine directory for job %s: %w", key, err) - } - - // Do not translate job 
task paths if using git source - if job.GitSource != nil { - continue - } - - for i := 0; i < len(job.Tasks); i++ { - err := m.translateJobTask(dir, b, &job.Tasks[i]) - if err != nil { - return err - } - } - } - - for key, pipeline := range b.Config.Resources.Pipelines { - dir, err := pipeline.ConfigFileDirectory() - if err != nil { - return fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) - } - - for i := 0; i < len(pipeline.Libraries); i++ { - err := m.translatePipelineLibrary(dir, b, &pipeline.Libraries[i]) - if err != nil { - return err - } + return err } } diff --git a/bundle/config/mutator/translate_paths_artifacts.go b/bundle/config/mutator/translate_paths_artifacts.go new file mode 100644 index 000000000..91e8397cb --- /dev/null +++ b/bundle/config/mutator/translate_paths_artifacts.go @@ -0,0 +1,42 @@ +package mutator + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" +) + +func transformArtifactPath(resource any, dir string) *transformer { + artifact, ok := resource.(*config.Artifact) + if !ok { + return nil + } + + return &transformer{ + dir, + &artifact.Path, + "artifacts.path", + translateNoOp, + } +} + +func applyArtifactTransformers(m *translatePaths, b *bundle.Bundle) error { + artifactTransformers := []transformFunc{ + transformArtifactPath, + } + + for key, artifact := range b.Config.Artifacts { + dir, err := artifact.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for artifact %s: %w", key, err) + } + + err = m.applyTransformers(artifactTransformers, b, artifact, dir) + if err != nil { + return err + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_jobs.go b/bundle/config/mutator/translate_paths_jobs.go new file mode 100644 index 000000000..b94df5e2e --- /dev/null +++ b/bundle/config/mutator/translate_paths_jobs.go @@ -0,0 +1,103 @@ +package mutator + +import ( + "fmt" + + 
"github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/compute" + "github.com/databricks/databricks-sdk-go/service/jobs" +) + +func transformNotebookTask(resource any, dir string) *transformer { + task, ok := resource.(*jobs.Task) + if !ok || task.NotebookTask == nil { + return nil + } + + return &transformer{ + dir, + &task.NotebookTask.NotebookPath, + "tasks.notebook_task.notebook_path", + translateNotebookPath, + } +} + +func transformSparkTask(resource any, dir string) *transformer { + task, ok := resource.(*jobs.Task) + if !ok || task.SparkPythonTask == nil { + return nil + } + + return &transformer{ + dir, + &task.SparkPythonTask.PythonFile, + "tasks.spark_python_task.python_file", + translateFilePath, + } +} + +func transformWhlLibrary(resource any, dir string) *transformer { + library, ok := resource.(*compute.Library) + if !ok || library.Whl == "" { + return nil + } + + return &transformer{ + dir, + &library.Whl, + "libraries.whl", + translateNoOp, + } +} + +func transformJarLibrary(resource any, dir string) *transformer { + library, ok := resource.(*compute.Library) + if !ok || library.Jar == "" { + return nil + } + + return &transformer{ + dir, + &library.Jar, + "libraries.jar", + translateFilePath, + } +} + +func applyJobTransformers(m *translatePaths, b *bundle.Bundle) error { + jobTransformers := []transformFunc{ + transformNotebookTask, + transformSparkTask, + transformWhlLibrary, + transformJarLibrary, + } + + for key, job := range b.Config.Resources.Jobs { + dir, err := job.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for job %s: %w", key, err) + } + + // Do not translate job task paths if using git source + if job.GitSource != nil { + continue + } + + for i := 0; i < len(job.Tasks); i++ { + task := &job.Tasks[i] + err := m.applyTransformers(jobTransformers, b, task, dir) + if err != nil { + return err + } + for j := 0; j < len(task.Libraries); j++ { + library := 
&task.Libraries[j] + err := m.applyTransformers(jobTransformers, b, library, dir) + if err != nil { + return err + } + } + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_pipelines.go b/bundle/config/mutator/translate_paths_pipelines.go new file mode 100644 index 000000000..1afdb9d51 --- /dev/null +++ b/bundle/config/mutator/translate_paths_pipelines.go @@ -0,0 +1,60 @@ +package mutator + +import ( + "fmt" + + "github.com/databricks/cli/bundle" + "github.com/databricks/databricks-sdk-go/service/pipelines" +) + +func transformLibraryNotebook(resource any, dir string) *transformer { + library, ok := resource.(*pipelines.PipelineLibrary) + if !ok || library.Notebook == nil { + return nil + } + + return &transformer{ + dir, + &library.Notebook.Path, + "libraries.notebook.path", + translateNotebookPath, + } +} + +func transformLibraryFile(resource any, dir string) *transformer { + library, ok := resource.(*pipelines.PipelineLibrary) + if !ok || library.File == nil { + return nil + } + + return &transformer{ + dir, + &library.File.Path, + "libraries.file.path", + translateFilePath, + } +} + +func applyPipelineTransformers(m *translatePaths, b *bundle.Bundle) error { + pipelineTransformers := []transformFunc{ + transformLibraryNotebook, + transformLibraryFile, + } + + for key, pipeline := range b.Config.Resources.Pipelines { + dir, err := pipeline.ConfigFileDirectory() + if err != nil { + return fmt.Errorf("unable to determine directory for pipeline %s: %w", key, err) + } + + for i := 0; i < len(pipeline.Libraries); i++ { + library := &pipeline.Libraries[i] + err := m.applyTransformers(pipelineTransformers, b, library, dir) + if err != nil { + return err + } + } + } + + return nil +} diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index b87f4f676..e7ac5e8af 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -9,7 +9,9 @@ 
import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/pipelines" "github.com/stretchr/testify/assert" @@ -43,7 +45,7 @@ func TestTranslatePathsSkippedWithGitSource(t *testing.T) { Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -103,6 +105,7 @@ func TestTranslatePaths(t *testing.T) { touchNotebookFile(t, filepath.Join(dir, "my_job_notebook.py")) touchNotebookFile(t, filepath.Join(dir, "my_pipeline_notebook.py")) touchEmptyFile(t, filepath.Join(dir, "my_python_file.py")) + touchEmptyFile(t, filepath.Join(dir, "dist", "task.jar")) bundle := &bundle.Bundle{ Config: config.Root{ @@ -113,7 +116,7 @@ func TestTranslatePaths(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -122,6 +125,9 @@ func TestTranslatePaths(t *testing.T) { NotebookTask: &jobs.NotebookTask{ NotebookPath: "./my_job_notebook.py", }, + Libraries: []compute.Library{ + {Whl: "./dist/task.whl"}, + }, }, { NotebookTask: &jobs.NotebookTask{ @@ -143,13 +149,29 @@ func TestTranslatePaths(t *testing.T) { PythonFile: "./my_python_file.py", }, }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorld", + }, + Libraries: []compute.Library{ + {Jar: "./dist/task.jar"}, + }, + }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorldRemote", + }, + Libraries: []compute.Library{ + {Jar: "dbfs:///bundle/dist/task_remote.jar"}, + }, + }, }, }, }, }, 
Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -194,6 +216,11 @@ func TestTranslatePaths(t *testing.T) { "/bundle/my_job_notebook", bundle.Config.Resources.Jobs["job"].Tasks[0].NotebookTask.NotebookPath, ) + assert.Equal( + t, + filepath.Join("dist", "task.whl"), + bundle.Config.Resources.Jobs["job"].Tasks[0].Libraries[0].Whl, + ) assert.Equal( t, "/Users/jane.doe@databricks.com/doesnt_exist.py", @@ -209,6 +236,16 @@ func TestTranslatePaths(t *testing.T) { "/bundle/my_python_file.py", bundle.Config.Resources.Jobs["job"].Tasks[4].SparkPythonTask.PythonFile, ) + assert.Equal( + t, + "/bundle/dist/task.jar", + bundle.Config.Resources.Jobs["job"].Tasks[5].Libraries[0].Jar, + ) + assert.Equal( + t, + "dbfs:///bundle/dist/task_remote.jar", + bundle.Config.Resources.Jobs["job"].Tasks[6].Libraries[0].Jar, + ) // Assert that the path in the libraries now refer to the artifact. 
assert.Equal( @@ -236,6 +273,7 @@ func TestTranslatePaths(t *testing.T) { func TestTranslatePathsInSubdirectories(t *testing.T) { dir := t.TempDir() touchEmptyFile(t, filepath.Join(dir, "job", "my_python_file.py")) + touchEmptyFile(t, filepath.Join(dir, "job", "dist", "task.jar")) touchEmptyFile(t, filepath.Join(dir, "pipeline", "my_python_file.py")) bundle := &bundle.Bundle{ @@ -247,7 +285,7 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "job/resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -257,13 +295,21 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { PythonFile: "./my_python_file.py", }, }, + { + SparkJarTask: &jobs.SparkJarTask{ + MainClassName: "HelloWorld", + }, + Libraries: []compute.Library{ + {Jar: "./dist/task.jar"}, + }, + }, }, }, }, }, Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "pipeline/resource.yml"), }, @@ -290,6 +336,11 @@ func TestTranslatePathsInSubdirectories(t *testing.T) { "/bundle/job/my_python_file.py", bundle.Config.Resources.Jobs["job"].Tasks[0].SparkPythonTask.PythonFile, ) + assert.Equal( + t, + "/bundle/job/dist/task.jar", + bundle.Config.Resources.Jobs["job"].Tasks[1].Libraries[0].Jar, + ) assert.Equal( t, @@ -310,7 +361,7 @@ func TestTranslatePathsOutsideBundleRoot(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "../resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -341,7 +392,7 @@ func TestJobNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, JobSettings: &jobs.JobSettings{ @@ 
-372,7 +423,7 @@ func TestJobFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, JobSettings: &jobs.JobSettings{ @@ -403,7 +454,7 @@ func TestPipelineNotebookDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -434,7 +485,7 @@ func TestPipelineFileDoesNotExistError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "fake.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -469,7 +520,7 @@ func TestJobSparkPythonTaskWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -504,7 +555,7 @@ func TestJobNotebookTaskWithFileSourceError(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "job": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, JobSettings: &jobs.JobSettings{ @@ -539,7 +590,7 @@ func TestPipelineNotebookLibraryWithFileSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ @@ -574,7 +625,7 @@ func TestPipelineFileLibraryWithNotebookSourceError(t *testing.T) { Resources: config.Resources{ Pipelines: map[string]*resources.Pipeline{ "pipeline": { - Paths: resources.Paths{ + Paths: paths.Paths{ 
ConfigFilePath: filepath.Join(dir, "resource.yml"), }, PipelineSpec: &pipelines.PipelineSpec{ diff --git a/bundle/config/resources/pkg.go b/bundle/config/paths/paths.go similarity index 95% rename from bundle/config/resources/pkg.go rename to bundle/config/paths/paths.go index 5cf54a06b..c2cbcb7dd 100644 --- a/bundle/config/resources/pkg.go +++ b/bundle/config/paths/paths.go @@ -1,4 +1,4 @@ -package resources +package paths import ( "fmt" diff --git a/bundle/config/resources/job.go b/bundle/config/resources/job.go index 6200062a8..66705afb2 100644 --- a/bundle/config/resources/job.go +++ b/bundle/config/resources/job.go @@ -1,6 +1,7 @@ package resources import ( + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/imdario/mergo" ) @@ -9,7 +10,7 @@ type Job struct { ID string `json:"id,omitempty" bundle:"readonly"` Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *jobs.JobSettings } diff --git a/bundle/config/resources/mlflow_experiment.go b/bundle/config/resources/mlflow_experiment.go index ebef039a8..d843cf226 100644 --- a/bundle/config/resources/mlflow_experiment.go +++ b/bundle/config/resources/mlflow_experiment.go @@ -1,11 +1,14 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/ml" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/ml" +) type MlflowExperiment struct { Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *ml.Experiment } diff --git a/bundle/config/resources/mlflow_model.go b/bundle/config/resources/mlflow_model.go index 31c72f6b0..92617c95a 100644 --- a/bundle/config/resources/mlflow_model.go +++ b/bundle/config/resources/mlflow_model.go @@ -1,11 +1,14 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/ml" +import ( + "github.com/databricks/cli/bundle/config/paths" + 
"github.com/databricks/databricks-sdk-go/service/ml" +) type MlflowModel struct { Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *ml.Model } diff --git a/bundle/config/resources/pipeline.go b/bundle/config/resources/pipeline.go index 96efc2c4f..d3a51c575 100644 --- a/bundle/config/resources/pipeline.go +++ b/bundle/config/resources/pipeline.go @@ -1,12 +1,15 @@ package resources -import "github.com/databricks/databricks-sdk-go/service/pipelines" +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/pipelines" +) type Pipeline struct { ID string `json:"id,omitempty" bundle:"readonly"` Permissions []Permission `json:"permissions,omitempty"` - Paths + paths.Paths *pipelines.PipelineSpec } diff --git a/bundle/config/resources_test.go b/bundle/config/resources_test.go index 63285bf94..82cb9f454 100644 --- a/bundle/config/resources_test.go +++ b/bundle/config/resources_test.go @@ -3,6 +3,7 @@ package config import ( "testing" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" "github.com/stretchr/testify/assert" ) @@ -11,21 +12,21 @@ func TestVerifyUniqueResourceIdentifiers(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, }, Experiments: map[string]*resources.MlflowExperiment{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, @@ -39,14 +40,14 @@ func TestVerifySafeMerge(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, 
@@ -55,7 +56,7 @@ func TestVerifySafeMerge(t *testing.T) { other := Resources{ Pipelines: map[string]*resources.Pipeline{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, @@ -69,14 +70,14 @@ func TestVerifySafeMergeForSameResourceType(t *testing.T) { r := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo.yml", }, }, }, Models: map[string]*resources.MlflowModel{ "bar": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "bar.yml", }, }, @@ -85,7 +86,7 @@ func TestVerifySafeMergeForSameResourceType(t *testing.T) { other := Resources{ Jobs: map[string]*resources.Job{ "foo": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: "foo2.yml", }, }, diff --git a/bundle/config/root.go b/bundle/config/root.go index 1275dab48..99ea33ad6 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -64,7 +64,7 @@ type Root struct { Workspace Workspace `json:"workspace,omitempty"` // Artifacts contains a description of all code artifacts in this bundle. - Artifacts map[string]*Artifact `json:"artifacts,omitempty"` + Artifacts Artifacts `json:"artifacts,omitempty"` // Resources contains a description of all Databricks resources // to deploy in this bundle (e.g. jobs, pipelines, etc.). @@ -113,6 +113,10 @@ func Load(path string) (*Root, error) { // was loaded from in configuration leafs that require it. 
func (r *Root) SetConfigFilePath(path string) { r.Resources.SetConfigFilePath(path) + if r.Artifacts != nil { + r.Artifacts.SetConfigFilePath(path) + } + if r.Targets != nil { for _, env := range r.Targets { if env == nil { @@ -121,6 +125,9 @@ func (r *Root) SetConfigFilePath(path string) { if env.Resources != nil { env.Resources.SetConfigFilePath(path) } + if env.Artifacts != nil { + env.Artifacts.SetConfigFilePath(path) + } } } } @@ -175,11 +182,17 @@ func (r *Root) Load(path string) error { } func (r *Root) Merge(other *Root) error { + err := r.Sync.Merge(r, other) + if err != nil { + return err + } + other.Sync = Sync{} + // TODO: when hooking into merge semantics, disallow setting path on the target instance. other.Path = "" // Check for safe merge, protecting against duplicate resource identifiers - err := r.Resources.VerifySafeMerge(&other.Resources) + err = r.Resources.VerifySafeMerge(&other.Resources) if err != nil { return err } diff --git a/bundle/config/sync.go b/bundle/config/sync.go index 0580e4c4f..6ba2603c4 100644 --- a/bundle/config/sync.go +++ b/bundle/config/sync.go @@ -1,5 +1,7 @@ package config +import "path/filepath" + type Sync struct { // Include contains a list of globs evaluated relative to the bundle root path // to explicitly include files that were excluded by the user's gitignore. @@ -11,3 +13,19 @@ type Sync struct { // 2) the `Include` field above. 
Exclude []string `json:"exclude,omitempty"` } + +func (s *Sync) Merge(root *Root, other *Root) error { + path, err := filepath.Rel(root.Path, other.Path) + if err != nil { + return err + } + for _, include := range other.Sync.Include { + s.Include = append(s.Include, filepath.Join(path, include)) + } + + for _, exclude := range other.Sync.Exclude { + s.Exclude = append(s.Exclude, filepath.Join(path, exclude)) + } + + return nil +} diff --git a/bundle/config/target.go b/bundle/config/target.go index 6a45fdb85..2489efc33 100644 --- a/bundle/config/target.go +++ b/bundle/config/target.go @@ -23,7 +23,7 @@ type Target struct { Workspace *Workspace `json:"workspace,omitempty"` - Artifacts map[string]*Artifact `json:"artifacts,omitempty"` + Artifacts Artifacts `json:"artifacts,omitempty"` Resources *Resources `json:"resources,omitempty"` diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index fb2c23e42..a9f57db8e 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" @@ -112,7 +113,7 @@ func TestNoPanicWithNoPythonWheelTasks(t *testing.T) { Resources: config.Resources{ Jobs: map[string]*resources.Job{ "test": { - Paths: resources.Paths{ + Paths: paths.Paths{ ConfigFilePath: tmpDir, }, JobSettings: &jobs.JobSettings{ diff --git a/bundle/tests/relative_path_with_includes/bundle.yml b/bundle/tests/relative_path_with_includes/bundle.yml new file mode 100644 index 000000000..36474c754 --- /dev/null +++ b/bundle/tests/relative_path_with_includes/bundle.yml @@ -0,0 +1,25 @@ +bundle: + name: sync_include + +include: + - "*/*.yml" + +sync: + include: + - ./folder_a/*.* + exclude: + - ./folder_b/*.* + +artifacts: + test_a: + type: 
whl + path: ./artifact_a + +resources: + jobs: + job_a: + name: "job_a" + tasks: + - task_key: "task_a" + libraries: + - whl: ./dist/job_a.whl diff --git a/bundle/tests/relative_path_with_includes/subfolder/include.yml b/bundle/tests/relative_path_with_includes/subfolder/include.yml new file mode 100644 index 000000000..597abe3bf --- /dev/null +++ b/bundle/tests/relative_path_with_includes/subfolder/include.yml @@ -0,0 +1,20 @@ +sync: + include: + - ./folder_c/*.* + exclude: + - ./folder_d/*.* + +artifacts: + test_b: + type: whl + path: ./artifact_b + + +resources: + jobs: + job_b: + name: "job_b" + tasks: + - task_key: "task_a" + libraries: + - whl: ./dist/job_b.whl diff --git a/bundle/tests/relative_path_with_includes_test.go b/bundle/tests/relative_path_with_includes_test.go new file mode 100644 index 000000000..92249c412 --- /dev/null +++ b/bundle/tests/relative_path_with_includes_test.go @@ -0,0 +1,28 @@ +package config_tests + +import ( + "context" + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/stretchr/testify/assert" +) + +func TestRelativePathsWithIncludes(t *testing.T) { + b := load(t, "./relative_path_with_includes") + + m := mutator.TranslatePaths() + err := bundle.Apply(context.Background(), b, m) + assert.NoError(t, err) + + assert.Equal(t, "artifact_a", b.Config.Artifacts["test_a"].Path) + assert.Equal(t, filepath.Join("subfolder", "artifact_b"), b.Config.Artifacts["test_b"].Path) + + assert.ElementsMatch(t, []string{"./folder_a/*.*", filepath.Join("subfolder", "folder_c", "*.*")}, b.Config.Sync.Include) + assert.ElementsMatch(t, []string{"./folder_b/*.*", filepath.Join("subfolder", "folder_d", "*.*")}, b.Config.Sync.Exclude) + + assert.Equal(t, filepath.Join("dist", "job_a.whl"), b.Config.Resources.Jobs["job_a"].Tasks[0].Libraries[0].Whl) + assert.Equal(t, filepath.Join("subfolder", "dist", "job_b.whl"), b.Config.Resources.Jobs["job_b"].Tasks[0].Libraries[0].Whl) 
+} From 437263eb583eacc99cd1b30499e85928c377c8a2 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 5 Sep 2023 10:27:18 +0200 Subject: [PATCH 093/139] Upgrade to actions/checkout@v4 (#731) ## Changes This should fix intermittent failures with v3 (see https://github.com/actions/checkout/issues/1448) --- .github/workflows/push.yml | 4 ++-- .github/workflows/release-snapshot.yml | 2 +- .github/workflows/release.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 3209ae93d..6194d4905 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -20,7 +20,7 @@ jobs: steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow @@ -50,7 +50,7 @@ jobs: steps: - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Go uses: actions/setup-go@v4 diff --git a/.github/workflows/release-snapshot.yml b/.github/workflows/release-snapshot.yml index fbf5421b4..c3398a2bd 100644 --- a/.github/workflows/release-snapshot.yml +++ b/.github/workflows/release-snapshot.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c166fc5b6..cbab15ecc 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository and submodules - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Unshallow run: git fetch --prune --unshallow From 1752e29885f4fbcdcf1c9e1d17d18a5cafa3ec93 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 5 Sep 2023 11:43:57 +0200 Subject: [PATCH 094/139] Update Go SDK to v0.19.0 (#729) ## Changes * Update Go 
SDK to v0.19.0 * Update commands per OpenAPI spec from Go SDK * Incorporate `client.Do()` signature change to include a (nil) header map * Update `workspace.WorkspaceService` mock with permissions methods * Skip `files` service in codegen; already implemented under the `fs` command ## Tests Unit and integration tests pass. --- .codegen/_openapi_sha | 2 +- .codegen/cmds-workspace.go.tmpl | 10 +- .codegen/service.go.tmpl | 10 +- .gitattributes | 5 + bundle/artifacts/artifacts_test.go | 32 + .../custom-app-integration.go | 1 + cmd/account/groups/groups.go | 2 +- cmd/account/log-delivery/log-delivery.go | 38 +- cmd/account/metastores/metastores.go | 2 +- .../service-principals/service-principals.go | 2 +- cmd/account/users/users.go | 2 +- cmd/account/workspaces/workspaces.go | 18 +- cmd/api/api.go | 3 +- .../artifact-allowlists.go | 172 +++++ cmd/workspace/catalogs/catalogs.go | 2 + .../cluster-policies/cluster-policies.go | 323 ++++++++- cmd/workspace/clusters/clusters.go | 318 +++++++++ cmd/workspace/cmd.go | 10 + cmd/workspace/connections/connections.go | 2 +- .../dashboard-widgets/dashboard-widgets.go | 228 +++++++ cmd/workspace/dashboards/dashboards.go | 4 +- cmd/workspace/experiments/experiments.go | 429 +++++++++++- cmd/workspace/groups/groups.go | 2 +- .../instance-pools/instance-pools.go | 317 ++++++++- cmd/workspace/jobs/jobs.go | 315 +++++++++ cmd/workspace/libraries/libraries.go | 2 +- .../model-registry/model-registry.go | 267 ++++++++ .../model-versions/model-versions.go | 400 +++++++++++ cmd/workspace/permissions/permissions.go | 78 ++- cmd/workspace/pipelines/pipelines.go | 315 +++++++++ .../query-visualizations.go | 236 +++++++ .../registered-models/registered-models.go | 635 ++++++++++++++++++ cmd/workspace/repos/repos.go | 315 +++++++++ cmd/workspace/secrets/secrets.go | 73 ++ .../service-principals/service-principals.go | 2 +- .../serving-endpoints/serving-endpoints.go | 267 ++++++++ .../token-management/token-management.go | 247 +++++++ 
cmd/workspace/users/users.go | 249 ++++++- cmd/workspace/volumes/volumes.go | 12 +- cmd/workspace/warehouses/warehouses.go | 315 +++++++++ cmd/workspace/workspace/workspace.go | 271 ++++++++ go.mod | 12 +- go.sum | 24 +- internal/fs_mkdir_test.go | 2 +- internal/sync_test.go | 2 +- libs/filer/files_client.go | 17 +- libs/filer/workspace_files_client.go | 2 +- 47 files changed, 5893 insertions(+), 99 deletions(-) create mode 100755 cmd/workspace/artifact-allowlists/artifact-allowlists.go create mode 100755 cmd/workspace/dashboard-widgets/dashboard-widgets.go create mode 100755 cmd/workspace/model-versions/model-versions.go create mode 100755 cmd/workspace/query-visualizations/query-visualizations.go create mode 100755 cmd/workspace/registered-models/registered-models.go diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index c9e7a8be7..b59218d39 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -7b57ba3a53f4de3d049b6a24391fe5474212daf8 \ No newline at end of file +09a7fa63d9ae243e5407941f200960ca14d48b07 \ No newline at end of file diff --git a/.codegen/cmds-workspace.go.tmpl b/.codegen/cmds-workspace.go.tmpl index 013c62f88..a9daa05d8 100644 --- a/.codegen/cmds-workspace.go.tmpl +++ b/.codegen/cmds-workspace.go.tmpl @@ -2,7 +2,15 @@ package workspace -{{ $excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions" "account-access-control-proxy" }} +{{ $excludes := + list + "command-execution" + "statement-execution" + "dbfs" + "dbsql-permissions" + "account-access-control-proxy" + "files" +}} import ( "github.com/databricks/cli/cmd/root" diff --git a/.codegen/service.go.tmpl b/.codegen/service.go.tmpl index 91f2e5cf7..4ede142d2 100644 --- a/.codegen/service.go.tmpl +++ b/.codegen/service.go.tmpl @@ -10,7 +10,15 @@ import ( "github.com/spf13/cobra" ) -{{ $excludes := list "command-execution" "statement-execution" "dbfs" "dbsql-permissions" "account-access-control-proxy" }} +{{ $excludes := + list + 
"command-execution" + "statement-execution" + "dbfs" + "dbsql-permissions" + "account-access-control-proxy" + "files" +}} {{if not (in $excludes .KebabName) }} {{template "service" .}} diff --git a/.gitattributes b/.gitattributes index f9ea04180..3209a0f3f 100755 --- a/.gitattributes +++ b/.gitattributes @@ -25,6 +25,7 @@ cmd/account/vpc-endpoints/vpc-endpoints.go linguist-generated=true cmd/account/workspace-assignment/workspace-assignment.go linguist-generated=true cmd/account/workspaces/workspaces.go linguist-generated=true cmd/workspace/alerts/alerts.go linguist-generated=true +cmd/workspace/artifact-allowlists/artifact-allowlists.go linguist-generated=true cmd/workspace/catalogs/catalogs.go linguist-generated=true cmd/workspace/clean-rooms/clean-rooms.go linguist-generated=true cmd/workspace/cluster-policies/cluster-policies.go linguist-generated=true @@ -32,6 +33,7 @@ cmd/workspace/clusters/clusters.go linguist-generated=true cmd/workspace/cmd.go linguist-generated=true cmd/workspace/connections/connections.go linguist-generated=true cmd/workspace/current-user/current-user.go linguist-generated=true +cmd/workspace/dashboard-widgets/dashboard-widgets.go linguist-generated=true cmd/workspace/dashboards/dashboards.go linguist-generated=true cmd/workspace/data-sources/data-sources.go linguist-generated=true cmd/workspace/experiments/experiments.go linguist-generated=true @@ -48,14 +50,17 @@ cmd/workspace/jobs/jobs.go linguist-generated=true cmd/workspace/libraries/libraries.go linguist-generated=true cmd/workspace/metastores/metastores.go linguist-generated=true cmd/workspace/model-registry/model-registry.go linguist-generated=true +cmd/workspace/model-versions/model-versions.go linguist-generated=true cmd/workspace/permissions/permissions.go linguist-generated=true cmd/workspace/pipelines/pipelines.go linguist-generated=true cmd/workspace/policy-families/policy-families.go linguist-generated=true cmd/workspace/providers/providers.go linguist-generated=true 
cmd/workspace/queries/queries.go linguist-generated=true cmd/workspace/query-history/query-history.go linguist-generated=true +cmd/workspace/query-visualizations/query-visualizations.go linguist-generated=true cmd/workspace/recipient-activation/recipient-activation.go linguist-generated=true cmd/workspace/recipients/recipients.go linguist-generated=true +cmd/workspace/registered-models/registered-models.go linguist-generated=true cmd/workspace/repos/repos.go linguist-generated=true cmd/workspace/schemas/schemas.go linguist-generated=true cmd/workspace/secrets/secrets.go linguist-generated=true diff --git a/bundle/artifacts/artifacts_test.go b/bundle/artifacts/artifacts_test.go index 65a1950ae..4c0a18f38 100644 --- a/bundle/artifacts/artifacts_test.go +++ b/bundle/artifacts/artifacts_test.go @@ -55,6 +55,38 @@ func (MockWorkspaceService) Mkdirs(ctx context.Context, request workspace.Mkdirs return nil } +// GetPermissionLevels implements workspace.WorkspaceService. +func (MockWorkspaceService) GetPermissionLevels( + ctx context.Context, + request workspace.GetWorkspaceObjectPermissionLevelsRequest, +) (*workspace.GetWorkspaceObjectPermissionLevelsResponse, error) { + panic("unimplemented") +} + +// GetPermissions implements workspace.WorkspaceService. +func (MockWorkspaceService) GetPermissions( + ctx context.Context, + request workspace.GetWorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + +// SetPermissions implements workspace.WorkspaceService. +func (MockWorkspaceService) SetPermissions( + ctx context.Context, + request workspace.WorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + +// UpdatePermissions implements workspace.WorkspaceService. 
+func (MockWorkspaceService) UpdatePermissions( + ctx context.Context, + request workspace.WorkspaceObjectPermissionsRequest, +) (*workspace.WorkspaceObjectPermissions, error) { + panic("unimplemented") +} + func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { dir := t.TempDir() whlPath := filepath.Join(dir, "dist", "test.whl") diff --git a/cmd/account/custom-app-integration/custom-app-integration.go b/cmd/account/custom-app-integration/custom-app-integration.go index d7269bf47..e58688095 100755 --- a/cmd/account/custom-app-integration/custom-app-integration.go +++ b/cmd/account/custom-app-integration/custom-app-integration.go @@ -60,6 +60,7 @@ func newCreate() *cobra.Command { cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().BoolVar(&createReq.Confidential, "confidential", createReq.Confidential, `indicates if an oauth client-secret should be generated.`) + // TODO: array: scopes // TODO: complex arg: token_access_policy cmd.Use = "create" diff --git a/cmd/account/groups/groups.go b/cmd/account/groups/groups.go index 04298b49c..6e3b98c00 100755 --- a/cmd/account/groups/groups.go +++ b/cmd/account/groups/groups.go @@ -368,7 +368,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update group details.` diff --git a/cmd/account/log-delivery/log-delivery.go b/cmd/account/log-delivery/log-delivery.go index a7ed39dca..6323e0ddd 100755 --- a/cmd/account/log-delivery/log-delivery.go +++ b/cmd/account/log-delivery/log-delivery.go @@ -34,22 +34,21 @@ func New() *cobra.Command { 1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. 
Using Databricks APIs, call the Account API to create a - [storage configuration object](#operation/create-storage-config) that uses the - bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM - role. For full details, including the required IAM role policies and trust + [storage configuration object](:method:Storage/Create) that uses the bucket + name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. + For full details, including the required IAM role policies and trust relationship, see [Billable usage log delivery]. Using Databricks APIs, call the Account API to create a [credential configuration - object](#operation/create-credential-config) that uses the IAM role's ARN. 3. - **Create log delivery configuration**: Using Databricks APIs, call the Account - API to [create a log delivery - configuration](#operation/create-log-delivery-config) that uses the credential - and storage configuration objects from previous steps. You can specify if the - logs should include all events of that log type in your account (_Account - level_ delivery) or only events for a specific set of workspaces (_workspace - level_ delivery). Account level log delivery applies to all current and future - workspaces plus account level logs, while workspace level log delivery solely - delivers logs related to the specified workspaces. You can create multiple - types of delivery configurations per account. + object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create + log delivery configuration**: Using Databricks APIs, call the Account API to + [create a log delivery configuration](:method:LogDelivery/Create) that uses + the credential and storage configuration objects from previous steps. You can + specify if the logs should include all events of that log type in your account + (_Account level_ delivery) or only events for a specific set of workspaces + (_workspace level_ delivery). 
Account level log delivery applies to all + current and future workspaces plus account level logs, while workspace level + log delivery solely delivers logs related to the specified workspaces. You can + create multiple types of delivery configurations per account. For billable usage delivery: * For more information about billable usage logs, see [Billable usage log delivery]. For the CSV schema, see the [Usage page]. * @@ -120,10 +119,9 @@ func newCreate() *cobra.Command { Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs to your storage location. This requires that you - already created a [credential object](#operation/create-credential-config) - (which encapsulates a cross-account service IAM role) and a [storage - configuration object](#operation/create-storage-config) (which encapsulates an - S3 bucket). + already created a [credential object](:method:Credentials/Create) (which + encapsulates a cross-account service IAM role) and a [storage configuration + object](:method:Storage/Create) (which encapsulates an S3 bucket). For full details, including the required IAM role policies and bucket policies, see [Deliver and access billable usage logs] or [Configure audit @@ -140,7 +138,7 @@ func newCreate() *cobra.Command { You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log delivery - configuration](#operation/patch-log-delivery-config-status)). + configuration](:method:LogDelivery/PatchStatus)). [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html` @@ -368,7 +366,7 @@ func newPatchStatus() *cobra.Command { configurations is not supported, so disable log delivery configurations that are no longer needed. 
Note that you can't re-enable a delivery configuration if this would violate the delivery configuration limits described under - [Create log delivery](#operation/create-log-delivery-config).` + [Create log delivery](:method:LogDelivery/Create).` cmd.Annotations = make(map[string]string) diff --git a/cmd/account/metastores/metastores.go b/cmd/account/metastores/metastores.go index 185f3642b..726b779d8 100755 --- a/cmd/account/metastores/metastores.go +++ b/cmd/account/metastores/metastores.go @@ -255,7 +255,7 @@ func newList() *cobra.Command { cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { ctx := cmd.Context() a := root.AccountClient(ctx) - response, err := a.Metastores.List(ctx) + response, err := a.Metastores.ListAll(ctx) if err != nil { return err } diff --git a/cmd/account/service-principals/service-principals.go b/cmd/account/service-principals/service-principals.go index 481af9789..f5823c692 100755 --- a/cmd/account/service-principals/service-principals.go +++ b/cmd/account/service-principals/service-principals.go @@ -367,7 +367,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update service principal details.` diff --git a/cmd/account/users/users.go b/cmd/account/users/users.go index 7e84f90f7..375dd5b5b 100755 --- a/cmd/account/users/users.go +++ b/cmd/account/users/users.go @@ -375,7 +375,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update user details.` diff --git a/cmd/account/workspaces/workspaces.go b/cmd/account/workspaces/workspaces.go index 96ac33b69..60142a8a0 100755 --- a/cmd/account/workspaces/workspaces.go +++ 
b/cmd/account/workspaces/workspaces.go @@ -70,7 +70,10 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.Cloud, "cloud", createReq.Cloud, `The cloud provider which the workspace uses.`) // TODO: complex arg: cloud_resource_container cmd.Flags().StringVar(&createReq.CredentialsId, "credentials-id", createReq.CredentialsId, `ID of the workspace's credential configuration object.`) + // TODO: map via StringToStringVar: custom_tags cmd.Flags().StringVar(&createReq.DeploymentName, "deployment-name", createReq.DeploymentName, `The deployment name defines part of the subdomain for the workspace.`) + // TODO: complex arg: gcp_managed_network_config + // TODO: complex arg: gke_config cmd.Flags().StringVar(&createReq.Location, "location", createReq.Location, `The Google Cloud region of the workspace data plane in your Google account.`) cmd.Flags().StringVar(&createReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", createReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) cmd.Flags().StringVar(&createReq.NetworkId, "network-id", createReq.NetworkId, ``) @@ -391,6 +394,7 @@ func newUpdate() *cobra.Command { cmd := &cobra.Command{} var updateReq provisioning.UpdateWorkspaceRequest + var updateJson flags.JsonFlag var updateSkipWait bool var updateTimeout time.Duration @@ -398,9 +402,11 @@ func newUpdate() *cobra.Command { cmd.Flags().BoolVar(&updateSkipWait, "no-wait", updateSkipWait, `do not wait to reach RUNNING state`) cmd.Flags().DurationVar(&updateTimeout, "timeout", 20*time.Minute, `maximum amount of time to reach RUNNING state`) // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) cmd.Flags().StringVar(&updateReq.AwsRegion, "aws-region", updateReq.AwsRegion, `The AWS region of the workspace's data plane (for example, us-west-2).`) 
cmd.Flags().StringVar(&updateReq.CredentialsId, "credentials-id", updateReq.CredentialsId, `ID of the workspace's credential configuration object.`) + // TODO: map via StringToStringVar: custom_tags cmd.Flags().StringVar(&updateReq.ManagedServicesCustomerManagedKeyId, "managed-services-customer-managed-key-id", updateReq.ManagedServicesCustomerManagedKeyId, `The ID of the workspace's managed services encryption key configuration object.`) cmd.Flags().StringVar(&updateReq.NetworkId, "network-id", updateReq.NetworkId, `The ID of the workspace's network configuration object.`) cmd.Flags().StringVar(&updateReq.StorageConfigurationId, "storage-configuration-id", updateReq.StorageConfigurationId, `The ID of the workspace's storage configuration object.`) @@ -435,7 +441,8 @@ func newUpdate() *cobra.Command { support. You can add or update the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end or back-end - PrivateLink support on a workspace. + PrivateLink support on a workspace. - Custom tags. Given you provide an empty + custom tags, the update would not be applied. After calling the PATCH operation to update the workspace configuration, make repeated GET requests with the workspace ID and check the workspace @@ -473,7 +480,8 @@ func newUpdate() *cobra.Command { PrivateLink support. You can add or update the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of connectivity. You cannot remove (downgrade) any existing front-end or back-end - PrivateLink support on a workspace. + PrivateLink support on a workspace. - Custom tags. Given you provide an empty + custom tags, the update would not be applied. 
**Important**: To update a running workspace, your workspace must have no running compute resources that run in your workspace's VPC in the Classic data @@ -529,6 +537,12 @@ func newUpdate() *cobra.Command { ctx := cmd.Context() a := root.AccountClient(ctx) + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } if len(args) == 0 { promptSpinner := cmdio.Spinner(ctx) promptSpinner <- "No WORKSPACE_ID argument specified. Loading names for Workspaces drop-down." diff --git a/cmd/api/api.go b/cmd/api/api.go index 698781e63..11a5e3e36 100644 --- a/cmd/api/api.go +++ b/cmd/api/api.go @@ -60,7 +60,8 @@ func makeCommand(method string) *cobra.Command { } var response any - err = api.Do(cmd.Context(), method, path, request, &response) + headers := map[string]string{"Content-Type": "application/json"} + err = api.Do(cmd.Context(), method, path, headers, request, &response) if err != nil { return err } diff --git a/cmd/workspace/artifact-allowlists/artifact-allowlists.go b/cmd/workspace/artifact-allowlists/artifact-allowlists.go new file mode 100755 index 000000000..9f9b9be1c --- /dev/null +++ b/cmd/workspace/artifact-allowlists/artifact-allowlists.go @@ -0,0 +1,172 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package artifact_allowlists + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "artifact-allowlists", + Short: `In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the allowlist in UC so that users can leverage these artifacts on compute configured with shared access mode.`, + Long: `In Databricks Runtime 13.3 and above, you can add libraries and init scripts + to the allowlist in UC so that users can leverage these artifacts on compute + configured with shared access mode.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getOverrides []func( + *cobra.Command, + *catalog.GetArtifactAllowlistRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetArtifactAllowlistRequest + + // TODO: short flags + + cmd.Use = "get ARTIFACT_TYPE" + cmd.Short = `Get an artifact allowlist.` + cmd.Long = `Get an artifact allowlist. + + Get the artifact allowlist of a certain artifact type. 
The caller must be a + metastore admin.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + _, err = fmt.Sscan(args[0], &getReq.ArtifactType) + if err != nil { + return fmt.Errorf("invalid ARTIFACT_TYPE: %s", args[0]) + } + + response, err := w.ArtifactAllowlists.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.SetArtifactAllowlist, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.SetArtifactAllowlist + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update" + cmd.Short = `Set an artifact allowlist.` + cmd.Long = `Set an artifact allowlist. + + Set the artifact allowlist of a certain artifact type. The whole artifact + allowlist is replaced with the new allowlist. 
The caller must be a metastore + admin.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.ArtifactAllowlists.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service ArtifactAllowlists diff --git a/cmd/workspace/catalogs/catalogs.go b/cmd/workspace/catalogs/catalogs.go index c17f6c22e..5e06977c4 100755 --- a/cmd/workspace/catalogs/catalogs.go +++ b/cmd/workspace/catalogs/catalogs.go @@ -60,6 +60,7 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&createReq.ConnectionName, "connection-name", createReq.ConnectionName, `The name of the connection to an external data source.`) + // TODO: map via StringToStringVar: options // TODO: map via StringToStringVar: properties cmd.Flags().StringVar(&createReq.ProviderName, "provider-name", createReq.ProviderName, `The name of delta sharing provider.`) cmd.Flags().StringVar(&createReq.ShareName, "share-name", createReq.ShareName, `The name of the share under the share provider.`) @@ -321,6 +322,7 @@ func newUpdate() 
*cobra.Command { cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `User-provided free-form text description.`) cmd.Flags().Var(&updateReq.IsolationMode, "isolation-mode", `Whether the current securable is accessible from all workspaces or a specific set of workspaces.`) cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `Name of catalog.`) + // TODO: map via StringToStringVar: options cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `Username of current owner of catalog.`) // TODO: map via StringToStringVar: properties diff --git a/cmd/workspace/cluster-policies/cluster-policies.go b/cmd/workspace/cluster-policies/cluster-policies.go index 74a092cdb..0e3091945 100755 --- a/cmd/workspace/cluster-policies/cluster-policies.go +++ b/cmd/workspace/cluster-policies/cluster-policies.go @@ -318,8 +318,8 @@ func newGet() *cobra.Command { // TODO: short flags cmd.Use = "get POLICY_ID" - cmd.Short = `Get entity.` - cmd.Long = `Get entity. + cmd.Short = `Get a cluster policy.` + cmd.Long = `Get a cluster policy. Get a cluster policy entity. Creation and editing is available to admins only.` @@ -374,6 +374,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetClusterPolicyPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels CLUSTER_POLICY_ID" + cmd.Short = `Get cluster policy permission levels.` + cmd.Long = `Get cluster policy permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + getPermissionLevelsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetClusterPolicyPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetClusterPolicyPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions CLUSTER_POLICY_ID" + cmd.Short = `Get cluster policy permissions.` + cmd.Long = `Get cluster policy permissions. + + Gets the permissions of a cluster policy. Cluster policies can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + getPermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -396,8 +543,8 @@ func newList() *cobra.Command { cmd.Flags().Var(&listReq.SortOrder, "sort-order", `The order in which the policies get listed.`) cmd.Use = "list" - cmd.Short = `Get a cluster policy.` - cmd.Long = `Get a cluster policy. + cmd.Short = `List cluster policies.` + cmd.Long = `List cluster policies. Returns a list of policies accessible by the requesting user.` @@ -449,4 +596,172 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPolicyPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.ClusterPolicyPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions CLUSTER_POLICY_ID" + cmd.Short = `Set cluster policy permissions.` + cmd.Long = `Set cluster policy permissions. + + Sets permissions on a cluster policy. 
Cluster policies can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + setPermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPolicyPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.ClusterPolicyPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions CLUSTER_POLICY_ID" + cmd.Short = `Update cluster policy permissions.` + cmd.Long = `Update cluster policy permissions. + + Updates the permissions on a cluster policy. Cluster policies can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_POLICY_ID argument specified. Loading names for Cluster Policies drop-down." + names, err := w.ClusterPolicies.PolicyNameToPolicyIdMap(ctx, compute.ListClusterPoliciesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Cluster Policies drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster policy for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster policy for which to get or manage permissions") + } + updatePermissionsReq.ClusterPolicyId = args[0] + + response, err := w.ClusterPolicies.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service ClusterPolicies diff --git a/cmd/workspace/clusters/clusters.go b/cmd/workspace/clusters/clusters.go index 861730632..dce6753df 100755 --- a/cmd/workspace/clusters/clusters.go +++ b/cmd/workspace/clusters/clusters.go @@ -169,6 +169,8 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.ClusterName, "cluster-name", createReq.ClusterName, `Cluster name requested by the user.`) cmd.Flags().Var(&createReq.ClusterSource, "cluster-source", `Determines whether the cluster was created by a user through the UI, created by the Databricks Jobs Scheduler, or through an API request.`) // TODO: map via StringToStringVar: custom_tags + cmd.Flags().Var(&createReq.DataSecurityMode, "data-security-mode", `This describes an enum.`) + // TODO: complex arg: docker_image cmd.Flags().StringVar(&createReq.DriverInstancePoolId, "driver-instance-pool-id", createReq.DriverInstancePoolId, `The optional ID of the instance pool for the driver of the cluster belongs.`) 
cmd.Flags().StringVar(&createReq.DriverNodeTypeId, "driver-node-type-id", createReq.DriverNodeTypeId, `The node type of the Spark driver.`) cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) @@ -180,6 +182,7 @@ func newCreate() *cobra.Command { cmd.Flags().IntVar(&createReq.NumWorkers, "num-workers", createReq.NumWorkers, `Number of worker nodes that this cluster should have.`) cmd.Flags().StringVar(&createReq.PolicyId, "policy-id", createReq.PolicyId, `The ID of the cluster policy used to create the cluster if applicable.`) cmd.Flags().Var(&createReq.RuntimeEngine, "runtime-engine", `Decides which runtime engine to be use, e.g.`) + cmd.Flags().StringVar(&createReq.SingleUserName, "single-user-name", createReq.SingleUserName, `Single user name if data_security_mode is SINGLE_USER.`) // TODO: map via StringToStringVar: spark_conf // TODO: map via StringToStringVar: spark_env_vars // TODO: array: ssh_public_keys @@ -661,6 +664,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetClusterPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetClusterPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels CLUSTER_ID" + cmd.Short = `Get cluster permission levels.` + cmd.Long = `Get cluster permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + getPermissionLevelsReq.ClusterId = args[0] + + response, err := w.Clusters.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetClusterPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetClusterPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions CLUSTER_ID" + cmd.Short = `Get cluster permissions.` + cmd.Long = `Get cluster permissions. + + Gets the permissions of a cluster. Clusters can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + getPermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -1217,6 +1367,90 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.ClusterPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions CLUSTER_ID" + cmd.Short = `Set cluster permissions.` + cmd.Long = `Set cluster permissions. + + Sets permissions on a cluster. Clusters can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + setPermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start spark-versions command // Slice with functions to override default command behavior. @@ -1456,4 +1690,88 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *compute.ClusterPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.ClusterPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions CLUSTER_ID" + cmd.Short = `Update cluster permissions.` + cmd.Long = `Update cluster permissions. + + Updates the permissions on a cluster. 
Clusters can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No CLUSTER_ID argument specified. Loading names for Clusters drop-down." + names, err := w.Clusters.ClusterDetailsClusterNameToClusterIdMap(ctx, compute.ListClustersRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Clusters drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The cluster for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the cluster for which to get or manage permissions") + } + updatePermissionsReq.ClusterId = args[0] + + response, err := w.Clusters.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Clusters diff --git a/cmd/workspace/cmd.go b/cmd/workspace/cmd.go index 74dcc3a58..495d8066d 100755 --- a/cmd/workspace/cmd.go +++ b/cmd/workspace/cmd.go @@ -4,12 +4,14 @@ package workspace import ( alerts "github.com/databricks/cli/cmd/workspace/alerts" + artifact_allowlists "github.com/databricks/cli/cmd/workspace/artifact-allowlists" catalogs "github.com/databricks/cli/cmd/workspace/catalogs" clean_rooms "github.com/databricks/cli/cmd/workspace/clean-rooms" cluster_policies "github.com/databricks/cli/cmd/workspace/cluster-policies" clusters "github.com/databricks/cli/cmd/workspace/clusters" connections "github.com/databricks/cli/cmd/workspace/connections" current_user "github.com/databricks/cli/cmd/workspace/current-user" + dashboard_widgets "github.com/databricks/cli/cmd/workspace/dashboard-widgets" dashboards "github.com/databricks/cli/cmd/workspace/dashboards" data_sources "github.com/databricks/cli/cmd/workspace/data-sources" experiments "github.com/databricks/cli/cmd/workspace/experiments" @@ -26,14 +28,17 @@ import ( libraries "github.com/databricks/cli/cmd/workspace/libraries" metastores "github.com/databricks/cli/cmd/workspace/metastores" model_registry "github.com/databricks/cli/cmd/workspace/model-registry" + model_versions "github.com/databricks/cli/cmd/workspace/model-versions" permissions "github.com/databricks/cli/cmd/workspace/permissions" pipelines "github.com/databricks/cli/cmd/workspace/pipelines" policy_families "github.com/databricks/cli/cmd/workspace/policy-families" providers "github.com/databricks/cli/cmd/workspace/providers" queries "github.com/databricks/cli/cmd/workspace/queries" query_history "github.com/databricks/cli/cmd/workspace/query-history" + query_visualizations 
"github.com/databricks/cli/cmd/workspace/query-visualizations" recipient_activation "github.com/databricks/cli/cmd/workspace/recipient-activation" recipients "github.com/databricks/cli/cmd/workspace/recipients" + registered_models "github.com/databricks/cli/cmd/workspace/registered-models" repos "github.com/databricks/cli/cmd/workspace/repos" schemas "github.com/databricks/cli/cmd/workspace/schemas" secrets "github.com/databricks/cli/cmd/workspace/secrets" @@ -59,12 +64,14 @@ func All() []*cobra.Command { var out []*cobra.Command out = append(out, alerts.New()) + out = append(out, artifact_allowlists.New()) out = append(out, catalogs.New()) out = append(out, clean_rooms.New()) out = append(out, cluster_policies.New()) out = append(out, clusters.New()) out = append(out, connections.New()) out = append(out, current_user.New()) + out = append(out, dashboard_widgets.New()) out = append(out, dashboards.New()) out = append(out, data_sources.New()) out = append(out, experiments.New()) @@ -81,14 +88,17 @@ func All() []*cobra.Command { out = append(out, libraries.New()) out = append(out, metastores.New()) out = append(out, model_registry.New()) + out = append(out, model_versions.New()) out = append(out, permissions.New()) out = append(out, pipelines.New()) out = append(out, policy_families.New()) out = append(out, providers.New()) out = append(out, queries.New()) out = append(out, query_history.New()) + out = append(out, query_visualizations.New()) out = append(out, recipient_activation.New()) out = append(out, recipients.New()) + out = append(out, registered_models.New()) out = append(out, repos.New()) out = append(out, schemas.New()) out = append(out, secrets.New()) diff --git a/cmd/workspace/connections/connections.go b/cmd/workspace/connections/connections.go index 7783b9eb7..c25825c9d 100755 --- a/cmd/workspace/connections/connections.go +++ b/cmd/workspace/connections/connections.go @@ -65,7 +65,7 @@ func newCreate() *cobra.Command { 
cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `User-provided free-form text description.`) cmd.Flags().StringVar(&createReq.Owner, "owner", createReq.Owner, `Username of current owner of the connection.`) - // TODO: map via StringToStringVar: properties_kvpairs + // TODO: map via StringToStringVar: properties cmd.Flags().BoolVar(&createReq.ReadOnly, "read-only", createReq.ReadOnly, `If the connection is read only.`) cmd.Use = "create" diff --git a/cmd/workspace/dashboard-widgets/dashboard-widgets.go b/cmd/workspace/dashboard-widgets/dashboard-widgets.go new file mode 100755 index 000000000..63e8d120c --- /dev/null +++ b/cmd/workspace/dashboard-widgets/dashboard-widgets.go @@ -0,0 +1,228 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package dashboard_widgets + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "dashboard-widgets", + Short: `This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace.`, + Long: `This is an evolving API that facilitates the addition and removal of widgets + from existing dashboards within the Databricks Workspace. Data structures may + change over time.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. 
+ for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var createOverrides []func( + *cobra.Command, + *sql.CreateWidget, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateWidget + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "create" + cmd.Short = `Add widget to a dashboard.` + cmd.Long = `Add widget to a dashboard.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.DashboardWidgets.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *sql.DeleteDashboardWidgetRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteDashboardWidgetRequest + + // TODO: short flags + + cmd.Use = "delete ID" + cmd.Short = `Remove widget.` + cmd.Long = `Remove widget.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.DashboardWidgets.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sql.CreateWidget, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.CreateWidget + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update" + cmd.Short = `Update existing widget.` + cmd.Long = `Update existing widget.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.DashboardWidgets.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service DashboardWidgets diff --git a/cmd/workspace/dashboards/dashboards.go b/cmd/workspace/dashboards/dashboards.go index 3c48dc1b7..2335ee28e 100755 --- a/cmd/workspace/dashboards/dashboards.go +++ b/cmd/workspace/dashboards/dashboards.go @@ -58,14 +58,14 @@ func newCreate() *cobra.Command { // TODO: short flags cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) - cmd.Use = "create" + cmd.Use = "create NAME" cmd.Short = `Create a dashboard object.` cmd.Long = `Create a dashboard object.` cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { - check := cobra.ExactArgs(0) + check := cobra.ExactArgs(1) if cmd.Flags().Changed("json") { check = cobra.ExactArgs(0) } diff --git a/cmd/workspace/experiments/experiments.go b/cmd/workspace/experiments/experiments.go index 1e2ff9fa1..13087029d 100755 --- a/cmd/workspace/experiments/experiments.go +++ b/cmd/workspace/experiments/experiments.go @@ -352,6 +352,86 @@ func init() { }) } +// start delete-runs command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteRunsOverrides []func( + *cobra.Command, + *ml.DeleteRuns, +) + +func newDeleteRuns() *cobra.Command { + cmd := &cobra.Command{} + + var deleteRunsReq ml.DeleteRuns + var deleteRunsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&deleteRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().IntVar(&deleteRunsReq.MaxRuns, "max-runs", deleteRunsReq.MaxRuns, `An optional positive integer indicating the maximum number of runs to delete.`) + + cmd.Use = "delete-runs EXPERIMENT_ID MAX_TIMESTAMP_MILLIS" + cmd.Short = `Delete runs by creation time.` + cmd.Long = `Delete runs by creation time. + + Bulk delete runs in an experiment that were created prior to or at the + specified timestamp. Deletes at most max_runs per request.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = deleteRunsJson.Unmarshal(&deleteRunsReq) + if err != nil { + return err + } + } else { + deleteRunsReq.ExperimentId = args[0] + _, err = fmt.Sscan(args[1], &deleteRunsReq.MaxTimestampMillis) + if err != nil { + return fmt.Errorf("invalid MAX_TIMESTAMP_MILLIS: %s", args[1]) + } + } + + response, err := w.Experiments.DeleteRuns(ctx, deleteRunsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range deleteRunsOverrides { + fn(cmd, &deleteRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteRuns()) + }) +} + // start delete-tag command // Slice with functions to override default command behavior. @@ -598,7 +678,7 @@ func newGetHistory() *cobra.Command { getHistoryReq.MetricKey = args[0] - response, err := w.Experiments.GetHistory(ctx, getHistoryReq) + response, err := w.Experiments.GetHistoryAll(ctx, getHistoryReq) if err != nil { return err } @@ -623,6 +703,129 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *ml.GetExperimentPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq ml.GetExperimentPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels EXPERIMENT_ID" + cmd.Short = `Get experiment permission levels.` + cmd.Long = `Get experiment permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.ExperimentId = args[0] + + response, err := w.Experiments.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *ml.GetExperimentPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq ml.GetExperimentPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions EXPERIMENT_ID" + cmd.Short = `Get experiment permissions.` + cmd.Long = `Get experiment permissions. + + Gets the permissions of an experiment. Experiments can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start get-run command // Slice with functions to override default command behavior. @@ -1428,6 +1631,86 @@ func init() { }) } +// start restore-runs command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var restoreRunsOverrides []func( + *cobra.Command, + *ml.RestoreRuns, +) + +func newRestoreRuns() *cobra.Command { + cmd := &cobra.Command{} + + var restoreRunsReq ml.RestoreRuns + var restoreRunsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&restoreRunsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().IntVar(&restoreRunsReq.MaxRuns, "max-runs", restoreRunsReq.MaxRuns, `An optional positive integer indicating the maximum number of runs to restore.`) + + cmd.Use = "restore-runs EXPERIMENT_ID MIN_TIMESTAMP_MILLIS" + cmd.Short = `Restore runs by deletion time.` + cmd.Long = `Restore runs by deletion time. + + Bulk restore runs in an experiment that were deleted no earlier than the + specified timestamp. 
Restores at most max_runs per request.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = restoreRunsJson.Unmarshal(&restoreRunsReq) + if err != nil { + return err + } + } else { + restoreRunsReq.ExperimentId = args[0] + _, err = fmt.Sscan(args[1], &restoreRunsReq.MinTimestampMillis) + if err != nil { + return fmt.Errorf("invalid MIN_TIMESTAMP_MILLIS: %s", args[1]) + } + } + + response, err := w.Experiments.RestoreRuns(ctx, restoreRunsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range restoreRunsOverrides { + fn(cmd, &restoreRunsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newRestoreRuns()) + }) +} + // start search-experiments command // Slice with functions to override default command behavior. @@ -1662,6 +1945,78 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *ml.ExperimentPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq ml.ExperimentPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions EXPERIMENT_ID" + cmd.Short = `Set experiment permissions.` + cmd.Long = `Set experiment permissions. + + Sets permissions on an experiment. Experiments can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start set-tag command // Slice with functions to override default command behavior. 
@@ -1815,6 +2170,78 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *ml.ExperimentPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq ml.ExperimentPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions EXPERIMENT_ID" + cmd.Short = `Update experiment permissions.` + cmd.Long = `Update experiment permissions. + + Updates the permissions on an experiment. Experiments can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.ExperimentId = args[0] + + response, err := w.Experiments.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // start update-run command // Slice with functions to override default command behavior. diff --git a/cmd/workspace/groups/groups.go b/cmd/workspace/groups/groups.go index 48a9c9c68..3f46abbc8 100755 --- a/cmd/workspace/groups/groups.go +++ b/cmd/workspace/groups/groups.go @@ -368,7 +368,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update group details.` diff --git a/cmd/workspace/instance-pools/instance-pools.go b/cmd/workspace/instance-pools/instance-pools.go index 2a95437fb..9e7805aef 100755 --- a/cmd/workspace/instance-pools/instance-pools.go +++ b/cmd/workspace/instance-pools/instance-pools.go @@ -77,7 +77,6 @@ func newCreate() *cobra.Command { cmd.Flags().BoolVar(&createReq.EnableElasticDisk, "enable-elastic-disk", createReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes cmd.Flags().IntVar(&createReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", createReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) - // TODO: complex arg: instance_pool_fleet_attributes cmd.Flags().IntVar(&createReq.MaxCapacity, "max-capacity", createReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) 
cmd.Flags().IntVar(&createReq.MinIdleInstances, "min-idle-instances", createReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images @@ -247,7 +246,6 @@ func newEdit() *cobra.Command { cmd.Flags().BoolVar(&editReq.EnableElasticDisk, "enable-elastic-disk", editReq.EnableElasticDisk, `Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire additional disk space when its Spark workers are running low on disk space.`) // TODO: complex arg: gcp_attributes cmd.Flags().IntVar(&editReq.IdleInstanceAutoterminationMinutes, "idle-instance-autotermination-minutes", editReq.IdleInstanceAutoterminationMinutes, `Automatically terminates the extra instances in the pool cache after they are inactive for this time in minutes if min_idle_instances requirement is already met.`) - // TODO: complex arg: instance_pool_fleet_attributes cmd.Flags().IntVar(&editReq.MaxCapacity, "max-capacity", editReq.MaxCapacity, `Maximum number of outstanding instances to keep in the pool, including both instances used by clusters and idle instances.`) cmd.Flags().IntVar(&editReq.MinIdleInstances, "min-idle-instances", editReq.MinIdleInstances, `Minimum number of idle instances to keep in the instance pool.`) // TODO: array: preloaded_docker_images @@ -383,6 +381,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *compute.GetInstancePoolPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq compute.GetInstancePoolPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels INSTANCE_POOL_ID" + cmd.Short = `Get instance pool permission levels.` + cmd.Long = `Get instance pool permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + getPermissionLevelsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *compute.GetInstancePoolPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq compute.GetInstancePoolPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions INSTANCE_POOL_ID" + cmd.Short = `Get instance pool permissions.` + cmd.Long = `Get instance pool permissions. + + Gets the permissions of an instance pool. Instance pools can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + getPermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -431,4 +576,172 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *compute.InstancePoolPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq compute.InstancePoolPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions INSTANCE_POOL_ID" + cmd.Short = `Set instance pool permissions.` + cmd.Long = `Set instance pool permissions. + + Sets permissions on an instance pool. 
Instance pools can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + setPermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *compute.InstancePoolPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq compute.InstancePoolPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions INSTANCE_POOL_ID" + cmd.Short = `Update instance pool permissions.` + cmd.Long = `Update instance pool permissions. + + Updates the permissions on an instance pool. Instance pools can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No INSTANCE_POOL_ID argument specified. Loading names for Instance Pools drop-down." + names, err := w.InstancePools.InstancePoolAndStatsInstancePoolNameToInstancePoolIdMap(ctx) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Instance Pools drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The instance pool for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the instance pool for which to get or manage permissions") + } + updatePermissionsReq.InstancePoolId = args[0] + + response, err := w.InstancePools.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service InstancePools diff --git a/cmd/workspace/jobs/jobs.go b/cmd/workspace/jobs/jobs.go index 640f40016..7670ebb77 100755 --- a/cmd/workspace/jobs/jobs.go +++ b/cmd/workspace/jobs/jobs.go @@ -646,6 +646,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *jobs.GetJobPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq jobs.GetJobPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels JOB_ID" + cmd.Short = `Get job permission levels.` + cmd.Long = `Get job permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + getPermissionLevelsReq.JobId = args[0] + + response, err := w.Jobs.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionsOverrides []func( + *cobra.Command, + *jobs.GetJobPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq jobs.GetJobPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions JOB_ID" + cmd.Short = `Get job permissions.` + cmd.Long = `Get job permissions. + + Gets the permissions of a job. Jobs can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + getPermissionsReq.JobId = args[0] + + response, err := w.Jobs.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start get-run command // Slice with functions to override default command behavior. @@ -1285,6 +1432,90 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *jobs.JobPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq jobs.JobPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions JOB_ID" + cmd.Short = `Set job permissions.` + cmd.Long = `Set job permissions. + + Sets permissions on a job. Jobs can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + setPermissionsReq.JobId = args[0] + + response, err := w.Jobs.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start submit command // Slice with functions to override default command behavior. @@ -1484,4 +1715,88 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *jobs.JobPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq jobs.JobPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions JOB_ID" + cmd.Short = `Update job permissions.` + cmd.Long = `Update job permissions. + + Updates the permissions on a job. 
Jobs can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No JOB_ID argument specified. Loading names for Jobs drop-down." + names, err := w.Jobs.BaseJobSettingsNameToJobIdMap(ctx, jobs.ListJobsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Jobs drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The job for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the job for which to get or manage permissions") + } + updatePermissionsReq.JobId = args[0] + + response, err := w.Jobs.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Jobs diff --git a/cmd/workspace/libraries/libraries.go b/cmd/workspace/libraries/libraries.go index e230bfb02..92671dc3e 100755 --- a/cmd/workspace/libraries/libraries.go +++ b/cmd/workspace/libraries/libraries.go @@ -154,7 +154,7 @@ func newClusterStatus() *cobra.Command { clusterStatusReq.ClusterId = args[0] - response, err := w.Libraries.ClusterStatus(ctx, clusterStatusReq) + response, err := w.Libraries.ClusterStatusAll(ctx, clusterStatusReq) if err != nil { return err } diff --git a/cmd/workspace/model-registry/model-registry.go b/cmd/workspace/model-registry/model-registry.go index d944e0d98..e2e552255 100755 --- a/cmd/workspace/model-registry/model-registry.go +++ b/cmd/workspace/model-registry/model-registry.go @@ -1226,6 +1226,129 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *ml.GetRegisteredModelPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq ml.GetRegisteredModelPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels REGISTERED_MODEL_ID" + cmd.Short = `Get registered model permission levels.` + cmd.Long = `Get registered model permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *ml.GetRegisteredModelPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq ml.GetRegisteredModelPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions REGISTERED_MODEL_ID" + cmd.Short = `Get registered model permissions.` + cmd.Long = `Get registered model permissions. + + Gets the permissions of a registered model. 
Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list-models command // Slice with functions to override default command behavior. @@ -1902,6 +2025,78 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *ml.RegisteredModelPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq ml.RegisteredModelPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions REGISTERED_MODEL_ID" + cmd.Short = `Set registered model permissions.` + cmd.Long = `Set registered model permissions. 
+ + Sets permissions on a registered model. Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start test-registry-webhook command // Slice with functions to override default command behavior. @@ -2292,6 +2487,78 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *ml.RegisteredModelPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq ml.RegisteredModelPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions REGISTERED_MODEL_ID" + cmd.Short = `Update registered model permissions.` + cmd.Long = `Update registered model permissions. + + Updates the permissions on a registered model. Registered models can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.RegisteredModelId = args[0] + + response, err := w.ModelRegistry.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // start update-webhook command // Slice with functions to override default command behavior. 
diff --git a/cmd/workspace/model-versions/model-versions.go b/cmd/workspace/model-versions/model-versions.go new file mode 100755 index 000000000..f62cddab0 --- /dev/null +++ b/cmd/workspace/model-versions/model-versions.go @@ -0,0 +1,400 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package model_versions + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "model-versions", + Short: `Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.`, + Long: `Databricks provides a hosted version of MLflow Model Registry in Unity + Catalog. Models in Unity Catalog provide centralized access control, auditing, + lineage, and discovery of ML models across Databricks workspaces. + + This API reference documents the REST endpoints for managing model versions in + Unity Catalog. For more details, see the [registered models API + docs](/api/workspace/registeredmodels).`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteModelVersionRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteModelVersionRequest + + // TODO: short flags + + cmd.Use = "delete FULL_NAME VERSION" + cmd.Short = `Delete a Model Version.` + cmd.Long = `Delete a Model Version. + + Deletes a model version from the specified registered model. Any aliases + assigned to the model version will also be deleted. + + The caller must be a metastore admin or an owner of the parent registered + model. For the latter case, the caller must also be the owner or have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &deleteReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + err = w.ModelVersions.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *catalog.GetModelVersionRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetModelVersionRequest + + // TODO: short flags + + cmd.Use = "get FULL_NAME VERSION" + cmd.Short = `Get a Model Version.` + cmd.Long = `Get a Model Version. + + Get a model version. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the parent registered model. For the latter case, the caller + must also be the owner or have the **USE_CATALOG** privilege on the parent + catalog and the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &getReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + response, err := w.ModelVersions.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start get-by-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getByAliasOverrides []func( + *cobra.Command, + *catalog.GetByAliasRequest, +) + +func newGetByAlias() *cobra.Command { + cmd := &cobra.Command{} + + var getByAliasReq catalog.GetByAliasRequest + + // TODO: short flags + + cmd.Use = "get-by-alias FULL_NAME ALIAS" + cmd.Short = `Get Model Version By Alias.` + cmd.Long = `Get Model Version By Alias. + + Get a model version by alias. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the registered model. For the latter case, the caller must also + be the owner or have the **USE_CATALOG** privilege on the parent catalog and + the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getByAliasReq.FullName = args[0] + getByAliasReq.Alias = args[1] + + response, err := w.ModelVersions.GetByAlias(ctx, getByAliasReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getByAliasOverrides { + fn(cmd, &getByAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetByAlias()) + }) +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var listOverrides []func( + *cobra.Command, + *catalog.ListModelVersionsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListModelVersionsRequest + + // TODO: short flags + + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Max number of model versions to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + + cmd.Use = "list FULL_NAME" + cmd.Short = `List Model Versions.` + cmd.Long = `List Model Versions. + + List model versions. You can list model versions under a particular schema, or + list all model versions in the current metastore. + + The returned models are filtered based on the privileges of the calling user. + For example, the metastore admin is able to list all the model versions. A + regular user needs to be the owner or have the **EXECUTE** privilege on the + parent registered model to recieve the model versions in the response. For the + latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + There is no guarantee of a specific ordering of the elements in the response.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + listReq.FullName = args[0] + + response, err := w.ModelVersions.ListAll(ctx, listReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateModelVersionRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateModelVersionRequest + + // TODO: short flags + + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the model version.`) + + cmd.Use = "update FULL_NAME VERSION" + cmd.Short = `Update a Model Version.` + cmd.Long = `Update a Model Version. + + Updates the specified model version. + + The caller must be a metastore admin or an owner of the parent registered + model. For the latter case, the caller must also be the owner or have the + **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** + privilege on the parent schema. 
+ + Currently only the comment of the model version can be updated.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + updateReq.FullName = args[0] + _, err = fmt.Sscan(args[1], &updateReq.Version) + if err != nil { + return fmt.Errorf("invalid VERSION: %s", args[1]) + } + + response, err := w.ModelVersions.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service ModelVersions diff --git a/cmd/workspace/permissions/permissions.go b/cmd/workspace/permissions/permissions.go index 39454b248..c168a1a48 100755 --- a/cmd/workspace/permissions/permissions.go +++ b/cmd/workspace/permissions/permissions.go @@ -19,7 +19,52 @@ func New() *cobra.Command { Use: "permissions", Short: `Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints.`, Long: `Permissions API are used to create read, write, edit, update and manage access - for various users on different objects and endpoints.`, + for various users on different objects and endpoints. + + * **[Cluster permissions](:service:clusters)** — Manage which users can + manage, restart, or attach to clusters. 
+ + * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which + users can use cluster policies. + + * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage + which users can view, manage, run, cancel, or own a Delta Live Tables + pipeline. + + * **[Job permissions](:service:jobs)** — Manage which users can view, + manage, trigger, cancel, or own a job. + + * **[MLflow experiment permissions](:service:experiments)** — Manage which + users can read, edit, or manage MLflow experiments. + + * **[MLflow registered model permissions](:service:modelregistry)** — Manage + which users can read, edit, or manage MLflow registered models. + + * **[Password permissions](:service:users)** — Manage which users can use + password login when SSO is enabled. + + * **[Instance Pool permissions](:service:instancepools)** — Manage which + users can manage or attach to pools. + + * **[Repo permissions](repos)** — Manage which users can read, run, edit, or + manage a repo. + + * **[Serving endpoint permissions](:service:servingendpoints)** — Manage + which users can view, query, or manage a serving endpoint. + + * **[SQL warehouse permissions](:service:warehouses)** — Manage which users + can use or manage SQL warehouses. + + * **[Token permissions](:service:tokenmanagement)** — Manage which users can + create or use tokens. + + * **[Workspace object permissions](:service:workspace)** — Manage which + users can read, run, edit, or manage directories, files, and notebooks. + + For the mapping of the required permissions for specific actions or abilities + and other important information, see [Access Control]. + + [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html`, GroupID: "iam", Annotations: map[string]string{ "package": "iam", @@ -54,8 +99,8 @@ func newGet() *cobra.Command { cmd.Short = `Get object permissions.` cmd.Long = `Get object permissions. - Gets the permission of an object. 
Objects can inherit permissions from their - parent objects or root objects.` + Gets the permissions of an object. Objects can inherit permissions from their + parent objects or root object.` cmd.Annotations = make(map[string]string) @@ -114,8 +159,8 @@ func newGetPermissionLevels() *cobra.Command { // TODO: short flags cmd.Use = "get-permission-levels REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" - cmd.Short = `Get permission levels.` - cmd.Long = `Get permission levels. + cmd.Short = `Get object permission levels.` + cmd.Long = `Get object permission levels. Gets the permission levels that a user can have on an object.` @@ -180,11 +225,11 @@ func newSet() *cobra.Command { // TODO: array: access_control_list cmd.Use = "set REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" - cmd.Short = `Set permissions.` - cmd.Long = `Set permissions. + cmd.Short = `Set object permissions.` + cmd.Long = `Set object permissions. - Sets permissions on object. Objects can inherit permissions from their parent - objects and root objects.` + Sets permissions on an object. Objects can inherit permissions from their + parent objects or root object.` cmd.Annotations = make(map[string]string) @@ -207,11 +252,11 @@ func newSet() *cobra.Command { setReq.RequestObjectType = args[0] setReq.RequestObjectId = args[1] - err = w.Permissions.Set(ctx, setReq) + response, err := w.Permissions.Set(ctx, setReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. @@ -253,10 +298,11 @@ func newUpdate() *cobra.Command { // TODO: array: access_control_list cmd.Use = "update REQUEST_OBJECT_TYPE REQUEST_OBJECT_ID" - cmd.Short = `Update permission.` - cmd.Long = `Update permission. + cmd.Short = `Update object permissions.` + cmd.Long = `Update object permissions. - Updates the permissions on an object.` + Updates the permissions on an object. 
Objects can inherit permissions from + their parent objects or root object.` cmd.Annotations = make(map[string]string) @@ -279,11 +325,11 @@ func newUpdate() *cobra.Command { updateReq.RequestObjectType = args[0] updateReq.RequestObjectId = args[1] - err = w.Permissions.Update(ctx, updateReq) + response, err := w.Permissions.Update(ctx, updateReq) if err != nil { return err } - return nil + return cmdio.Render(ctx, response) } // Disable completions since they are not applicable. diff --git a/cmd/workspace/pipelines/pipelines.go b/cmd/workspace/pipelines/pipelines.go index 708343b2e..10bcc226e 100755 --- a/cmd/workspace/pipelines/pipelines.go +++ b/cmd/workspace/pipelines/pipelines.go @@ -272,6 +272,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *pipelines.GetPipelinePermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq pipelines.GetPipelinePermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels PIPELINE_ID" + cmd.Short = `Get pipeline permission levels.` + cmd.Long = `Get pipeline permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." 
+ names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + getPermissionLevelsReq.PipelineId = args[0] + + response, err := w.Pipelines.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *pipelines.GetPipelinePermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq pipelines.GetPipelinePermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions PIPELINE_ID" + cmd.Short = `Get pipeline permissions.` + cmd.Long = `Get pipeline permissions. + + Gets the permissions of a pipeline. 
Pipelines can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + getPermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start get-update command // Slice with functions to override default command behavior. @@ -664,6 +811,90 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *pipelines.PipelinePermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq pipelines.PipelinePermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions PIPELINE_ID" + cmd.Short = `Set pipeline permissions.` + cmd.Long = `Set pipeline permissions. + + Sets permissions on a pipeline. Pipelines can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." + names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + setPermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. 
+ // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start start-update command // Slice with functions to override default command behavior. @@ -942,4 +1173,88 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *pipelines.PipelinePermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq pipelines.PipelinePermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions PIPELINE_ID" + cmd.Short = `Update pipeline permissions.` + cmd.Long = `Update pipeline permissions. + + Updates the permissions on a pipeline. Pipelines can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No PIPELINE_ID argument specified. Loading names for Pipelines drop-down." 
+ names, err := w.Pipelines.PipelineStateInfoNameToPipelineIdMap(ctx, pipelines.ListPipelinesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Pipelines drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The pipeline for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the pipeline for which to get or manage permissions") + } + updatePermissionsReq.PipelineId = args[0] + + response, err := w.Pipelines.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Pipelines diff --git a/cmd/workspace/query-visualizations/query-visualizations.go b/cmd/workspace/query-visualizations/query-visualizations.go new file mode 100755 index 000000000..fae0f9341 --- /dev/null +++ b/cmd/workspace/query-visualizations/query-visualizations.go @@ -0,0 +1,236 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package query_visualizations + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/sql" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "query-visualizations", + Short: `This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.`, + Long: `This is an evolving API that facilitates the addition and removal of + vizualisations from existing queries within the Databricks Workspace. Data + structures may change over time.`, + GroupID: "sql", + Annotations: map[string]string{ + "package": "sql", + }, + + // This service is being previewed; hide from help output. + Hidden: true, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *sql.CreateQueryVisualizationRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq sql.CreateQueryVisualizationRequest + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "create" + cmd.Short = `Add visualization to a query.` + cmd.Long = `Add visualization to a query.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.QueryVisualizations.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteOverrides []func( + *cobra.Command, + *sql.DeleteQueryVisualizationRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq sql.DeleteQueryVisualizationRequest + + // TODO: short flags + + cmd.Use = "delete ID" + cmd.Short = `Remove visualization.` + cmd.Long = `Remove visualization.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteReq.Id = args[0] + + err = w.QueryVisualizations.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updateOverrides []func( + *cobra.Command, + *sql.Visualization, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq sql.Visualization + var updateJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updateJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "update ID" + cmd.Short = `Edit existing visualization.` + cmd.Long = `Edit existing visualization.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updateJson.Unmarshal(&updateReq) + if err != nil { + return err + } + } else { + return fmt.Errorf("please provide command input in JSON format by specifying the --json flag") + } + + response, err := w.QueryVisualizations.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service QueryVisualizations diff --git a/cmd/workspace/registered-models/registered-models.go b/cmd/workspace/registered-models/registered-models.go new file mode 100755 index 000000000..cc7822531 --- /dev/null +++ b/cmd/workspace/registered-models/registered-models.go @@ -0,0 +1,635 @@ +// Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +package registered_models + +import ( + "fmt" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/databricks-sdk-go/service/catalog" + "github.com/spf13/cobra" +) + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var cmdOverrides []func(*cobra.Command) + +func New() *cobra.Command { + cmd := &cobra.Command{ + Use: "registered-models", + Short: `Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.`, + Long: `Databricks provides a hosted version of MLflow Model Registry in Unity + Catalog. Models in Unity Catalog provide centralized access control, auditing, + lineage, and discovery of ML models across Databricks workspaces. + + An MLflow registered model resides in the third layer of Unity Catalog’s + three-level namespace. Registered models contain model versions, which + correspond to actual ML models (MLflow models). Creating new model versions + currently requires use of the MLflow Python client. Once model versions are + created, you can load them for batch inference using MLflow Python client + APIs, or deploy them for real-time serving using Databricks Model Serving. 
+ + All operations on registered models and model versions require USE_CATALOG + permissions on the enclosing catalog and USE_SCHEMA permissions on the + enclosing schema. In addition, the following additional privileges are + required for various operations: + + * To create a registered model, users must additionally have the CREATE_MODEL + permission on the target schema. * To view registered model or model version + metadata, model version data files, or invoke a model version, users must + additionally have the EXECUTE permission on the registered model * To update + registered model or model version tags, users must additionally have APPLY TAG + permissions on the registered model * To update other registered model or + model version metadata (comments, aliases) create a new model version, or + update permissions on the registered model, users must be owners of the + registered model. + + Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. + tagging, grants) that specify a securable type, use "FUNCTION" as the + securable type.`, + GroupID: "catalog", + Annotations: map[string]string{ + "package": "catalog", + }, + } + + // Apply optional overrides to this command. + for _, fn := range cmdOverrides { + fn(cmd) + } + + return cmd +} + +// start create command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var createOverrides []func( + *cobra.Command, + *catalog.CreateRegisteredModelRequest, +) + +func newCreate() *cobra.Command { + cmd := &cobra.Command{} + + var createReq catalog.CreateRegisteredModelRequest + var createJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&createJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the registered model.`) + cmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud under which model version data files are stored.`) + + cmd.Use = "create CATALOG_NAME SCHEMA_NAME NAME" + cmd.Short = `Create a Registered Model.` + cmd.Long = `Create a Registered Model. + + Creates a new registered model in Unity Catalog. + + File storage for model versions in the registered model will be located in the + default location which is specified by the parent schema, or the parent + catalog, or the Metastore. + + For registered model creation to succeed, the user must satisfy the following + conditions: - The caller must be a metastore admin, or be the owner of the + parent catalog and schema, or have the **USE_CATALOG** privilege on the parent + catalog and the **USE_SCHEMA** privilege on the parent schema. 
- The caller + must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(3) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = createJson.Unmarshal(&createReq) + if err != nil { + return err + } + } else { + createReq.CatalogName = args[0] + createReq.SchemaName = args[1] + createReq.Name = args[2] + } + + response, err := w.RegisteredModels.Create(ctx, createReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range createOverrides { + fn(cmd, &createReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newCreate()) + }) +} + +// start delete command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var deleteOverrides []func( + *cobra.Command, + *catalog.DeleteRegisteredModelRequest, +) + +func newDelete() *cobra.Command { + cmd := &cobra.Command{} + + var deleteReq catalog.DeleteRegisteredModelRequest + + // TODO: short flags + + cmd.Use = "delete FULL_NAME" + cmd.Short = `Delete a Registered Model.` + cmd.Long = `Delete a Registered Model. + + Deletes a registered model and all its model versions from the specified + parent catalog and schema. 
+ + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." + names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + deleteReq.FullName = args[0] + + err = w.RegisteredModels.Delete(ctx, deleteReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteOverrides { + fn(cmd, &deleteReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDelete()) + }) +} + +// start delete-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var deleteAliasOverrides []func( + *cobra.Command, + *catalog.DeleteAliasRequest, +) + +func newDeleteAlias() *cobra.Command { + cmd := &cobra.Command{} + + var deleteAliasReq catalog.DeleteAliasRequest + + // TODO: short flags + + cmd.Use = "delete-alias FULL_NAME ALIAS" + cmd.Short = `Delete a Registered Model Alias.` + cmd.Long = `Delete a Registered Model Alias. + + Deletes a registered model alias. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + deleteAliasReq.FullName = args[0] + deleteAliasReq.Alias = args[1] + + err = w.RegisteredModels.DeleteAlias(ctx, deleteAliasReq) + if err != nil { + return err + } + return nil + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range deleteAliasOverrides { + fn(cmd, &deleteAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newDeleteAlias()) + }) +} + +// start get command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getOverrides []func( + *cobra.Command, + *catalog.GetRegisteredModelRequest, +) + +func newGet() *cobra.Command { + cmd := &cobra.Command{} + + var getReq catalog.GetRegisteredModelRequest + + // TODO: short flags + + cmd.Use = "get FULL_NAME" + cmd.Short = `Get a Registered Model.` + cmd.Long = `Get a Registered Model. + + Get a registered model. + + The caller must be a metastore admin or an owner of (or have the **EXECUTE** + privilege on) the registered model. For the latter case, the caller must also + be the owner or have the **USE_CATALOG** privilege on the parent catalog and + the **USE_SCHEMA** privilege on the parent schema.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." + names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + getReq.FullName = args[0] + + response, err := w.RegisteredModels.Get(ctx, getReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. 
+ cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getOverrides { + fn(cmd, &getReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGet()) + }) +} + +// start list command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var listOverrides []func( + *cobra.Command, + *catalog.ListRegisteredModelsRequest, +) + +func newList() *cobra.Command { + cmd := &cobra.Command{} + + var listReq catalog.ListRegisteredModelsRequest + var listJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&listJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Flags().StringVar(&listReq.CatalogName, "catalog-name", listReq.CatalogName, `The identifier of the catalog under which to list registered models.`) + cmd.Flags().IntVar(&listReq.MaxResults, "max-results", listReq.MaxResults, `Max number of registered models to return.`) + cmd.Flags().StringVar(&listReq.PageToken, "page-token", listReq.PageToken, `Opaque token to send for the next page of results (pagination).`) + cmd.Flags().StringVar(&listReq.SchemaName, "schema-name", listReq.SchemaName, `The identifier of the schema under which to list registered models.`) + + cmd.Use = "list" + cmd.Short = `List Registered Models.` + cmd.Long = `List Registered Models. + + List registered models. You can list registered models under a particular + schema, or list all registered models in the current metastore. + + The returned models are filtered based on the privileges of the calling user. + For example, the metastore admin is able to list all the registered models. A + regular user needs to be the owner or have the **EXECUTE** privilege on the + registered model to recieve the registered models in the response. 
For the + latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + There is no guarantee of a specific ordering of the elements in the response.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = listJson.Unmarshal(&listReq) + if err != nil { + return err + } + } else { + } + + response, err := w.RegisteredModels.ListAll(ctx, listReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range listOverrides { + fn(cmd, &listReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newList()) + }) +} + +// start set-alias command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setAliasOverrides []func( + *cobra.Command, + *catalog.SetRegisteredModelAliasRequest, +) + +func newSetAlias() *cobra.Command { + cmd := &cobra.Command{} + + var setAliasReq catalog.SetRegisteredModelAliasRequest + var setAliasJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setAliasJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + cmd.Use = "set-alias FULL_NAME ALIAS VERSION_NUM" + cmd.Short = `Set a Registered Model Alias.` + cmd.Long = `Set a Registered Model Alias. + + Set an alias on the specified registered model. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(3) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setAliasJson.Unmarshal(&setAliasReq) + if err != nil { + return err + } + } else { + setAliasReq.FullName = args[0] + setAliasReq.Alias = args[1] + _, err = fmt.Sscan(args[2], &setAliasReq.VersionNum) + if err != nil { + return fmt.Errorf("invalid VERSION_NUM: %s", args[2]) + } + } + + response, err := w.RegisteredModels.SetAlias(ctx, setAliasReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setAliasOverrides { + fn(cmd, &setAliasReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetAlias()) + }) +} + +// start update command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updateOverrides []func( + *cobra.Command, + *catalog.UpdateRegisteredModelRequest, +) + +func newUpdate() *cobra.Command { + cmd := &cobra.Command{} + + var updateReq catalog.UpdateRegisteredModelRequest + + // TODO: short flags + + cmd.Flags().StringVar(&updateReq.Comment, "comment", updateReq.Comment, `The comment attached to the registered model.`) + cmd.Flags().StringVar(&updateReq.Name, "name", updateReq.Name, `The name of the registered model.`) + cmd.Flags().StringVar(&updateReq.Owner, "owner", updateReq.Owner, `The identifier of the user who owns the registered model.`) + + cmd.Use = "update FULL_NAME" + cmd.Short = `Update a Registered Model.` + cmd.Long = `Update a Registered Model. + + Updates the specified registered model. + + The caller must be a metastore admin or an owner of the registered model. For + the latter case, the caller must also be the owner or have the **USE_CATALOG** + privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent + schema. + + Currently only the name, the owner or the comment of the registered model can + be updated.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No FULL_NAME argument specified. Loading names for Registered Models drop-down." 
+ names, err := w.RegisteredModels.RegisteredModelInfoNameToFullNameMap(ctx, catalog.ListRegisteredModelsRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Registered Models drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The three-level (fully qualified) name of the registered model") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the three-level (fully qualified) name of the registered model") + } + updateReq.FullName = args[0] + + response, err := w.RegisteredModels.Update(ctx, updateReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updateOverrides { + fn(cmd, &updateReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdate()) + }) +} + +// end service RegisteredModels diff --git a/cmd/workspace/repos/repos.go b/cmd/workspace/repos/repos.go index b1e003717..2d510e904 100755 --- a/cmd/workspace/repos/repos.go +++ b/cmd/workspace/repos/repos.go @@ -275,6 +275,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *workspace.GetRepoPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq workspace.GetRepoPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels REPO_ID" + cmd.Short = `Get repo permission levels.` + cmd.Long = `Get repo permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + getPermissionLevelsReq.RepoId = args[0] + + response, err := w.Repos.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *workspace.GetRepoPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq workspace.GetRepoPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions REPO_ID" + cmd.Short = `Get repo permissions.` + cmd.Long = `Get repo permissions. + + Gets the permissions of a repo. Repos can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + getPermissionsReq.RepoId = args[0] + + response, err := w.Repos.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -351,6 +498,90 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *workspace.RepoPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq workspace.RepoPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions REPO_ID" + cmd.Short = `Set repo permissions.` + cmd.Long = `Set repo permissions. + + Sets permissions on a repo. 
Repos can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + setPermissionsReq.RepoId = args[0] + + response, err := w.Repos.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start update command // Slice with functions to override default command behavior. @@ -440,4 +671,88 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *workspace.RepoPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq workspace.RepoPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions REPO_ID" + cmd.Short = `Update repo permissions.` + cmd.Long = `Update repo permissions. + + Updates the permissions on a repo. Repos can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No REPO_ID argument specified. Loading names for Repos drop-down." + names, err := w.Repos.RepoInfoPathToIdMap(ctx, workspace.ListReposRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Repos drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The repo for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the repo for which to get or manage permissions") + } + updatePermissionsReq.RepoId = args[0] + + response, err := w.Repos.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Repos diff --git a/cmd/workspace/secrets/secrets.go b/cmd/workspace/secrets/secrets.go index a8b907ac4..5425da90c 100755 --- a/cmd/workspace/secrets/secrets.go +++ b/cmd/workspace/secrets/secrets.go @@ -427,6 +427,79 @@ func init() { }) } +// start get-secret command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getSecretOverrides []func( + *cobra.Command, + *workspace.GetSecretRequest, +) + +func newGetSecret() *cobra.Command { + cmd := &cobra.Command{} + + var getSecretReq workspace.GetSecretRequest + + // TODO: short flags + + cmd.Use = "get-secret SCOPE KEY" + cmd.Short = `Get a secret.` + cmd.Long = `Get a secret. + + Gets the bytes representation of a secret value for the specified scope and + key. + + Users need the READ permission to make this call. + + Note that the secret value returned is in bytes. 
The interpretation of the + bytes is determined by the caller in DBUtils and the type the data is decoded + into. + + Throws PERMISSION_DENIED if the user does not have permission to make this + API call. Throws RESOURCE_DOES_NOT_EXIST if no such secret or secret scope + exists.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getSecretReq.Scope = args[0] + getSecretReq.Key = args[1] + + response, err := w.Secrets.GetSecret(ctx, getSecretReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getSecretOverrides { + fn(cmd, &getSecretReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetSecret()) + }) +} + // start list-acls command // Slice with functions to override default command behavior. 
diff --git a/cmd/workspace/service-principals/service-principals.go b/cmd/workspace/service-principals/service-principals.go index f30a92d4b..1958dd21b 100755 --- a/cmd/workspace/service-principals/service-principals.go +++ b/cmd/workspace/service-principals/service-principals.go @@ -367,7 +367,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update service principal details.` diff --git a/cmd/workspace/serving-endpoints/serving-endpoints.go b/cmd/workspace/serving-endpoints/serving-endpoints.go index 33b0abac7..e22a38443 100755 --- a/cmd/workspace/serving-endpoints/serving-endpoints.go +++ b/cmd/workspace/serving-endpoints/serving-endpoints.go @@ -374,6 +374,129 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *serving.GetServingEndpointPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq serving.GetServingEndpointPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels SERVING_ENDPOINT_ID" + cmd.Short = `Get serving endpoint permission levels.` + cmd.Long = `Get serving endpoint permission levels. 
+ + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *serving.GetServingEndpointPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq serving.GetServingEndpointPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Get serving endpoint permissions.` + cmd.Long = `Get serving endpoint permissions. + + Gets the permissions of a serving endpoint. 
Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -542,6 +665,78 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var setPermissionsOverrides []func( + *cobra.Command, + *serving.ServingEndpointPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq serving.ServingEndpointPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Set serving endpoint permissions.` + cmd.Long = `Set serving endpoint permissions. 
+ + Sets permissions on a serving endpoint. Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start update-config command // Slice with functions to override default command behavior. @@ -630,4 +825,76 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var updatePermissionsOverrides []func( + *cobra.Command, + *serving.ServingEndpointPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq serving.ServingEndpointPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions SERVING_ENDPOINT_ID" + cmd.Short = `Update serving endpoint permissions.` + cmd.Long = `Update serving endpoint permissions. + + Updates the permissions on a serving endpoint. Serving endpoints can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(1) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.ServingEndpointId = args[0] + + response, err := w.ServingEndpoints.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service ServingEndpoints diff --git a/cmd/workspace/token-management/token-management.go b/cmd/workspace/token-management/token-management.go index dcee2f0ee..b934e2640 100755 --- a/cmd/workspace/token-management/token-management.go +++ b/cmd/workspace/token-management/token-management.go @@ -262,6 +262,103 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permission-levels" + cmd.Short = `Get token permission levels.` + cmd.Long = `Get token permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.TokenManagement.GetPermissionLevels(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range getPermissionLevelsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permissions" + cmd.Short = `Get token permissions.` + cmd.Long = `Get token permissions. + + Gets the permissions of all tokens. Tokens can inherit permissions from their + root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.TokenManagement.GetPermissions(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -337,4 +434,154 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *settings.TokenPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq settings.TokenPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions" + cmd.Short = `Set token permissions.` + cmd.Long = `Set token permissions. + + Sets permissions on all tokens. Tokens can inherit permissions from their root + object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.TokenManagement.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *settings.TokenPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq settings.TokenPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions" + cmd.Short = `Update token permissions.` + cmd.Long = `Update token permissions. + + Updates the permissions on all tokens. Tokens can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.TokenManagement.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service TokenManagement diff --git a/cmd/workspace/users/users.go b/cmd/workspace/users/users.go index 2dfbf6e86..b1a8b0572 100755 --- a/cmd/workspace/users/users.go +++ b/cmd/workspace/users/users.go @@ -276,6 +276,103 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permission-levels" + cmd.Short = `Get password permission levels.` + cmd.Long = `Get password permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.Users.GetPermissionLevels(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. 
+// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + cmd.Use = "get-permissions" + cmd.Short = `Get password permissions.` + cmd.Long = `Get password permissions. + + Gets the permissions of all passwords. Passwords can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + response, err := w.Users.GetPermissions(ctx) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start list command // Slice with functions to override default command behavior. @@ -375,7 +472,7 @@ func newPatch() *cobra.Command { cmd.Flags().Var(&patchJson, "json", `either inline JSON string or @path/to/file.json with request body`) // TODO: array: Operations - // TODO: array: schema + // TODO: array: schemas cmd.Use = "patch ID" cmd.Short = `Update user details.` @@ -441,6 +538,81 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq iam.PasswordPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions" + cmd.Short = `Set password permissions.` + cmd.Long = `Set password permissions. + + Sets permissions on all passwords. Passwords can inherit permissions from + their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.Users.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start update command // Slice with functions to override default command behavior. 
@@ -534,4 +706,79 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *iam.PasswordPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq iam.PasswordPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions" + cmd.Short = `Update password permissions.` + cmd.Long = `Update password permissions. + + Updates the permissions on all passwords. Passwords can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(0) + if cmd.Flags().Changed("json") { + check = cobra.ExactArgs(0) + } + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } else { + } + + response, err := w.Users.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Users diff --git a/cmd/workspace/volumes/volumes.go b/cmd/workspace/volumes/volumes.go index 2d2026820..4dbfc5856 100755 --- a/cmd/workspace/volumes/volumes.go +++ b/cmd/workspace/volumes/volumes.go @@ -32,9 +32,6 @@ func New() *cobra.Command { Annotations: map[string]string{ "package": "catalog", }, - - // This service is being previewed; hide from help output. - Hidden: true, } // Apply optional overrides to this command. @@ -66,7 +63,7 @@ func newCreate() *cobra.Command { cmd.Flags().StringVar(&createReq.Comment, "comment", createReq.Comment, `The comment attached to the volume.`) cmd.Flags().StringVar(&createReq.StorageLocation, "storage-location", createReq.StorageLocation, `The storage location on the cloud.`) - cmd.Use = "create CATALOG_NAME NAME SCHEMA_NAME VOLUME_TYPE" + cmd.Use = "create CATALOG_NAME SCHEMA_NAME NAME VOLUME_TYPE" cmd.Short = `Create a Volume.` cmd.Long = `Create a Volume. @@ -111,8 +108,8 @@ func newCreate() *cobra.Command { } } else { createReq.CatalogName = args[0] - createReq.Name = args[1] - createReq.SchemaName = args[2] + createReq.SchemaName = args[1] + createReq.Name = args[2] _, err = fmt.Sscan(args[3], &createReq.VolumeType) if err != nil { return fmt.Errorf("invalid VOLUME_TYPE: %s", args[3]) @@ -253,6 +250,9 @@ func newList() *cobra.Command { There is no guarantee of a specific ordering of the elements in the array.` + // This command is being previewed; hide from help output. 
+ cmd.Hidden = true + cmd.Annotations = make(map[string]string) cmd.Args = func(cmd *cobra.Command, args []string) error { diff --git a/cmd/workspace/warehouses/warehouses.go b/cmd/workspace/warehouses/warehouses.go index 378b931db..c64e0e0b5 100755 --- a/cmd/workspace/warehouses/warehouses.go +++ b/cmd/workspace/warehouses/warehouses.go @@ -418,6 +418,153 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionLevelsOverrides []func( + *cobra.Command, + *sql.GetWarehousePermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq sql.GetWarehousePermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels WAREHOUSE_ID" + cmd.Short = `Get SQL warehouse permission levels.` + cmd.Long = `Get SQL warehouse permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. 
Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + getPermissionLevelsReq.WarehouseId = args[0] + + response, err := w.Warehouses.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var getPermissionsOverrides []func( + *cobra.Command, + *sql.GetWarehousePermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq sql.GetWarehousePermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions WAREHOUSE_ID" + cmd.Short = `Get SQL warehouse permissions.` + cmd.Long = `Get SQL warehouse permissions. + + Gets the permissions of a SQL warehouse. 
SQL warehouses can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + getPermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start get-workspace-warehouse-config command // Slice with functions to override default command behavior. @@ -541,6 +688,90 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *sql.WarehousePermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq sql.WarehousePermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions WAREHOUSE_ID" + cmd.Short = `Set SQL warehouse permissions.` + cmd.Long = `Set SQL warehouse permissions. + + Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions + from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." + names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + setPermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. 
+ // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + // start set-workspace-warehouse-config command // Slice with functions to override default command behavior. @@ -818,4 +1049,88 @@ func init() { }) } +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *sql.WarehousePermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq sql.WarehousePermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions WAREHOUSE_ID" + cmd.Short = `Update SQL warehouse permissions.` + cmd.Long = `Update SQL warehouse permissions. + + Updates the permissions on a SQL warehouse. SQL warehouses can inherit + permissions from their root object.` + + cmd.Annotations = make(map[string]string) + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + if len(args) == 0 { + promptSpinner := cmdio.Spinner(ctx) + promptSpinner <- "No WAREHOUSE_ID argument specified. Loading names for Warehouses drop-down." 
+ names, err := w.Warehouses.EndpointInfoNameToIdMap(ctx, sql.ListWarehousesRequest{}) + close(promptSpinner) + if err != nil { + return fmt.Errorf("failed to load names for Warehouses drop-down. Please manually specify required arguments. Original error: %w", err) + } + id, err := cmdio.Select(ctx, names, "The SQL warehouse for which to get or manage permissions") + if err != nil { + return err + } + args = append(args, id) + } + if len(args) != 1 { + return fmt.Errorf("expected to have the sql warehouse for which to get or manage permissions") + } + updatePermissionsReq.WarehouseId = args[0] + + response, err := w.Warehouses.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Warehouses diff --git a/cmd/workspace/workspace/workspace.go b/cmd/workspace/workspace/workspace.go index aeca95253..124680f0b 100755 --- a/cmd/workspace/workspace/workspace.go +++ b/cmd/workspace/workspace/workspace.go @@ -212,6 +212,131 @@ func init() { }) } +// start get-permission-levels command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionLevelsOverrides []func( + *cobra.Command, + *workspace.GetWorkspaceObjectPermissionLevelsRequest, +) + +func newGetPermissionLevels() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionLevelsReq workspace.GetWorkspaceObjectPermissionLevelsRequest + + // TODO: short flags + + cmd.Use = "get-permission-levels WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Get workspace object permission levels.` + cmd.Long = `Get workspace object permission levels. + + Gets the permission levels that a user can have on an object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionLevelsReq.WorkspaceObjectType = args[0] + getPermissionLevelsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.GetPermissionLevels(ctx, getPermissionLevelsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionLevelsOverrides { + fn(cmd, &getPermissionLevelsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissionLevels()) + }) +} + +// start get-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var getPermissionsOverrides []func( + *cobra.Command, + *workspace.GetWorkspaceObjectPermissionsRequest, +) + +func newGetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var getPermissionsReq workspace.GetWorkspaceObjectPermissionsRequest + + // TODO: short flags + + cmd.Use = "get-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Get workspace object permissions.` + cmd.Long = `Get workspace object permissions. + + Gets the permissions of a workspace object. Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + getPermissionsReq.WorkspaceObjectType = args[0] + getPermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.GetPermissions(ctx, getPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. + for _, fn := range getPermissionsOverrides { + fn(cmd, &getPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newGetPermissions()) + }) +} + // start get-status command // Slice with functions to override default command behavior. @@ -507,4 +632,150 @@ func init() { }) } +// start set-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. 
+var setPermissionsOverrides []func( + *cobra.Command, + *workspace.WorkspaceObjectPermissionsRequest, +) + +func newSetPermissions() *cobra.Command { + cmd := &cobra.Command{} + + var setPermissionsReq workspace.WorkspaceObjectPermissionsRequest + var setPermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&setPermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "set-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Set workspace object permissions.` + cmd.Long = `Set workspace object permissions. + + Sets permissions on a workspace object. Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = setPermissionsJson.Unmarshal(&setPermissionsReq) + if err != nil { + return err + } + } + setPermissionsReq.WorkspaceObjectType = args[0] + setPermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.SetPermissions(ctx, setPermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range setPermissionsOverrides { + fn(cmd, &setPermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newSetPermissions()) + }) +} + +// start update-permissions command + +// Slice with functions to override default command behavior. +// Functions can be added from the `init()` function in manually curated files in this directory. +var updatePermissionsOverrides []func( + *cobra.Command, + *workspace.WorkspaceObjectPermissionsRequest, +) + +func newUpdatePermissions() *cobra.Command { + cmd := &cobra.Command{} + + var updatePermissionsReq workspace.WorkspaceObjectPermissionsRequest + var updatePermissionsJson flags.JsonFlag + + // TODO: short flags + cmd.Flags().Var(&updatePermissionsJson, "json", `either inline JSON string or @path/to/file.json with request body`) + + // TODO: array: access_control_list + + cmd.Use = "update-permissions WORKSPACE_OBJECT_TYPE WORKSPACE_OBJECT_ID" + cmd.Short = `Update workspace object permissions.` + cmd.Long = `Update workspace object permissions. + + Updates the permissions on a workspace object. 
Workspace objects can inherit + permissions from their parent objects or root object.` + + cmd.Annotations = make(map[string]string) + + cmd.Args = func(cmd *cobra.Command, args []string) error { + check := cobra.ExactArgs(2) + return check(cmd, args) + } + + cmd.PreRunE = root.MustWorkspaceClient + cmd.RunE = func(cmd *cobra.Command, args []string) (err error) { + ctx := cmd.Context() + w := root.WorkspaceClient(ctx) + + if cmd.Flags().Changed("json") { + err = updatePermissionsJson.Unmarshal(&updatePermissionsReq) + if err != nil { + return err + } + } + updatePermissionsReq.WorkspaceObjectType = args[0] + updatePermissionsReq.WorkspaceObjectId = args[1] + + response, err := w.Workspace.UpdatePermissions(ctx, updatePermissionsReq) + if err != nil { + return err + } + return cmdio.Render(ctx, response) + } + + // Disable completions since they are not applicable. + // Can be overridden by manual implementation in `override.go`. + cmd.ValidArgsFunction = cobra.NoFileCompletions + + // Apply optional overrides to this command. 
+ for _, fn := range updatePermissionsOverrides { + fn(cmd, &updatePermissionsReq) + } + + return cmd +} + +func init() { + cmdOverrides = append(cmdOverrides, func(cmd *cobra.Command) { + cmd.AddCommand(newUpdatePermissions()) + }) +} + // end service Workspace diff --git a/go.mod b/go.mod index 9534a4c9d..7e24b0db2 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.14.1 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.19.0 // Apache 2.0 github.com/fatih/color v1.15.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.3.0 // BSD-3-Clause @@ -32,7 +32,7 @@ require ( ) require ( - cloud.google.com/go/compute v1.20.1 // indirect + cloud.google.com/go/compute v1.23.0 // indirect cloud.google.com/go/compute/metadata v0.2.3 // indirect github.com/ProtonMail/go-crypto v0.0.0-20230217124315-7d5c6f04bbb8 // indirect github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect @@ -42,7 +42,7 @@ require ( github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-querystring v1.1.0 // indirect - github.com/google/s2a-go v0.1.4 // indirect + github.com/google/s2a-go v0.1.5 // indirect github.com/googleapis/enterprise-certificate-proxy v0.2.5 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect @@ -54,10 +54,10 @@ require ( golang.org/x/net v0.14.0 // indirect golang.org/x/sys v0.11.0 // indirect golang.org/x/time v0.3.0 // indirect - google.golang.org/api v0.131.0 // indirect + google.golang.org/api v0.138.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130 // indirect - google.golang.org/grpc v1.56.2 // indirect + google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 
// indirect + google.golang.org/grpc v1.57.0 // indirect google.golang.org/protobuf v1.31.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect diff --git a/go.sum b/go.sum index b8c90e5e6..83bb01b62 100644 --- a/go.sum +++ b/go.sum @@ -1,7 +1,7 @@ cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -cloud.google.com/go/compute v1.20.1 h1:6aKEtlUiwEpJzM001l0yFkpXmUVXaN8W+fbkb2AZNbg= -cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= +cloud.google.com/go/compute v1.23.0 h1:tP41Zoavr8ptEqaW6j+LQOnyBBhO7OkOMAGrgLopTwY= +cloud.google.com/go/compute v1.23.0/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM= cloud.google.com/go/compute/metadata v0.2.3 h1:mg4jlk7mCAj6xXp9UJ4fjI9VUI5rubuGBW5aJ7UnBMY= cloud.google.com/go/compute/metadata v0.2.3/go.mod h1:VAV5nSsACxMJvgaAuX6Pk2AawlZn8kiOGuCv6gTkwuA= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= @@ -36,8 +36,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/databricks/databricks-sdk-go v0.14.1 h1:s9x18c2i6XbJxem6zKdTrrwEUXQX/Nzn0iVM+qGlRus= -github.com/databricks/databricks-sdk-go v0.14.1/go.mod h1:0iuEtPIoD6oqw7OuFbPskhlEryt2FPH+Ies1UYiiDy8= +github.com/databricks/databricks-sdk-go v0.19.0 h1:Xh5A90/+8ehW7fTqoQbQK5xZu7a/akv3Xwv8UdWB4GU= +github.com/databricks/databricks-sdk-go v0.19.0/go.mod h1:Bt/3i3ry/rQdE6Y+psvkAENlp+LzJHaQK5PsLIstQb4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -93,8 +93,8 @@ github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= -github.com/google/s2a-go v0.1.4 h1:1kZ/sQM3srePvKs3tXAvQzo66XfcReoqFpIpIccE7Oc= -github.com/google/s2a-go v0.1.4/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= +github.com/google/s2a-go v0.1.5 h1:8IYp3w9nysqv3JH+NJgXJzGbDHzLOTj43BmSkp+O7qg= +github.com/google/s2a-go v0.1.5/go.mod h1:Ej+mSEMGRnqRzjc7VtF+jdBwYG5fuJfiZ8ELkjEwM0A= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -257,8 +257,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -google.golang.org/api v0.131.0 h1:AcgWS2edQ4chVEt/SxgDKubVu/9/idCJy00tBGuGB4M= -google.golang.org/api v0.131.0/go.mod h1:7vtkbKv2REjJbxmHSkBTBQ5LUGvPdAqjjvt84XAfhpA= +google.golang.org/api v0.138.0 h1:K/tVp05MxNVbHShRw9m7e9VJGdagNeTdMzqPH7AUqr0= +google.golang.org/api v0.138.0/go.mod h1:4xyob8CxC+0GChNBvEUAk8VBKNvYOTWM9T3v3UfRxuY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= 
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= @@ -267,8 +267,8 @@ google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoA google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130 h1:2FZP5XuJY9zQyGM5N0rtovnoXjiMUEIUMvw0m9wlpLc= -google.golang.org/genproto/googleapis/rpc v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:8mL13HKkDa+IuJ8yruA3ci0q+0vsUz4m//+ottjwS5o= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577 h1:wukfNtZmZUurLN/atp2hiIeTKn7QJWIQdHzqmsOnAOk= +google.golang.org/genproto/googleapis/rpc v0.0.0-20230807174057-1744710a1577/go.mod h1:+Bk1OCOj40wS2hwAMA+aCW9ypzm63QTBBHp6lQ3p+9M= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= @@ -277,8 +277,8 @@ google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTp google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.45.0/go.mod h1:lN7owxKUQEqMfSyQikvvk5tf/6zMPsrK+ONuO11+0rQ= -google.golang.org/grpc v1.56.2 h1:fVRFRnXvU+x6C4IlHZewvJOVHoOv1TUuQyoRsYnB4bI= -google.golang.org/grpc v1.56.2/go.mod h1:I9bI3vqKfayGqPUAwGdOSu7kt6oIJLixfffKrpXqQ9s= +google.golang.org/grpc v1.57.0 
h1:kfzNeI/klCGD2YPMUlaGNT3pxvYfga7smW3Vth8Zsiw= +google.golang.org/grpc v1.57.0/go.mod h1:Sd+9RMTACXwmub0zcNY2c4arhtrbBYD1AUHI/dt16Mo= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= diff --git a/internal/fs_mkdir_test.go b/internal/fs_mkdir_test.go index 83417c136..b743ebb79 100644 --- a/internal/fs_mkdir_test.go +++ b/internal/fs_mkdir_test.go @@ -112,6 +112,6 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) // Different cloud providers return different errors. - regex := regexp.MustCompile(`^Path is a file: .*$|^Cannot create directory .* because .* is an existing file\.$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) + regex := regexp.MustCompile(`(^|: )Path is a file: .*$|^Cannot create directory .* because .* is an existing file\.$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) assert.Regexp(t, regex, err.Error()) } diff --git a/internal/sync_test.go b/internal/sync_test.go index 66b5fd3ca..bc1cbd914 100644 --- a/internal/sync_test.go +++ b/internal/sync_test.go @@ -159,7 +159,7 @@ func (a *syncTest) remoteFileContent(ctx context.Context, relativePath string, e var res []byte a.c.Eventually(func() bool { - err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, &res) + err = apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &res) require.NoError(a.t, err) actualContent := string(res) return actualContent == expectedContent diff --git a/libs/filer/files_client.go b/libs/filer/files_client.go index 285338b66..17884d573 100644 --- a/libs/filer/files_client.go +++ b/libs/filer/files_client.go @@ -104,11 +104,8 @@ func (w 
*FilesClient) Write(ctx context.Context, name string, reader io.Reader, overwrite := slices.Contains(mode, OverwriteIfExists) urlPath = fmt.Sprintf("%s?overwrite=%t", urlPath, overwrite) - err = w.apiClient.Do(ctx, http.MethodPut, urlPath, reader, nil, - func(r *http.Request) error { - r.Header.Set("Content-Type", "application/octet-stream") - return nil - }) + headers := map[string]string{"Content-Type": "application/octet-stream"} + err = w.apiClient.Do(ctx, http.MethodPut, urlPath, headers, reader, nil) // Return early on success. if err == nil { @@ -136,7 +133,7 @@ func (w *FilesClient) Read(ctx context.Context, name string) (io.ReadCloser, err } var buf bytes.Buffer - err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, &buf) + err = w.apiClient.Do(ctx, http.MethodGet, urlPath, nil, nil, &buf) // Return early on success. if err == nil { @@ -168,7 +165,7 @@ func (w *FilesClient) Delete(ctx context.Context, name string, mode ...DeleteMod return CannotDeleteRootError{} } - err = w.apiClient.Do(ctx, http.MethodDelete, urlPath, nil, nil) + err = w.apiClient.Do(ctx, http.MethodDelete, urlPath, nil, nil, nil) // Return early on success. if err == nil { @@ -210,11 +207,7 @@ func (w *FilesClient) Stat(ctx context.Context, name string) (fs.FileInfo, error return nil, err } - err = w.apiClient.Do(ctx, http.MethodHead, urlPath, nil, nil, - func(r *http.Request) error { - r.Header.Del("Content-Type") - return nil - }) + err = w.apiClient.Do(ctx, http.MethodHead, urlPath, nil, nil, nil) // If the HEAD requests succeeds, the file exists. 
if err == nil { diff --git a/libs/filer/workspace_files_client.go b/libs/filer/workspace_files_client.go index ed4ad7a2b..41e35d9d1 100644 --- a/libs/filer/workspace_files_client.go +++ b/libs/filer/workspace_files_client.go @@ -115,7 +115,7 @@ func (w *WorkspaceFilesClient) Write(ctx context.Context, name string, reader io return err } - err = w.apiClient.Do(ctx, http.MethodPost, urlPath, body, nil) + err = w.apiClient.Do(ctx, http.MethodPost, urlPath, nil, body, nil) // Return early on success. if err == nil { From 7a130a3e6e4302e835a19dc281816a31f37ee0d6 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 5 Sep 2023 11:58:45 +0200 Subject: [PATCH 095/139] Group permission related commands (#730) ## Changes Before: ``` Usage: databricks instance-pools [command] Available Commands: create Create a new instance pool. delete Delete an instance pool. edit Edit an existing instance pool. get Get instance pool information. get-permission-levels Get instance pool permission levels. get-permissions Get instance pool permissions. list List instance pool info. set-permissions Set instance pool permissions. update-permissions Update instance pool permissions. ``` After: ``` Usage: databricks instance-pools [command] Available Commands create Create a new instance pool. delete Delete an instance pool. edit Edit an existing instance pool. get Get instance pool information. list List instance pool info. Permission Commands get-permission-levels Get instance pool permission levels. get-permissions Get instance pool permissions. set-permissions Set instance pool permissions. update-permissions Update instance pool permissions. ``` ## Tests Manual. 
--- cmd/cmd.go | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/cmd/cmd.go b/cmd/cmd.go index 04d7cc804..032fde5cd 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -1,6 +1,8 @@ package cmd import ( + "strings" + "github.com/databricks/cli/cmd/account" "github.com/databricks/cli/cmd/api" "github.com/databricks/cli/cmd/auth" @@ -14,6 +16,11 @@ import ( "github.com/spf13/cobra" ) +const ( + mainGroup = "main" + permissionsGroup = "permissions" +) + func New() *cobra.Command { cli := root.New() @@ -22,6 +29,31 @@ func New() *cobra.Command { // Add workspace subcommands. for _, cmd := range workspace.All() { + // Built-in groups for the workspace commands. + groups := []cobra.Group{ + { + ID: mainGroup, + Title: "Available Commands", + }, + { + ID: permissionsGroup, + Title: "Permission Commands", + }, + } + for i := range groups { + cmd.AddGroup(&groups[i]) + } + + // Order the permissions subcommands after the main commands. + for _, sub := range cmd.Commands() { + switch { + case strings.HasSuffix(sub.Name(), "-permissions"), strings.HasSuffix(sub.Name(), "-permission-levels"): + sub.GroupID = permissionsGroup + default: + sub.GroupID = mainGroup + } + } + cli.AddCommand(cmd) } From 2f2386ef5a505c7391e75096736fa3e99894d7a4 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 5 Sep 2023 11:58:56 +0200 Subject: [PATCH 096/139] Work on GitHub Action (#733) ## Changes * Run the build workflow on push to main to properly use the build cache Same as https://github.com/databricks/databricks-sdk-go/pull/601. ## Tests n/a --- .github/workflows/push.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 6194d4905..f0fa2ee68 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -5,6 +5,14 @@ on: types: [opened, synchronize] merge_group: types: [checks_requested] + push: + # Always run on push to main. 
The build cache can only be reused + # if it was saved by a run from the repository's default branch. + # The run result will be identical to that from the merge queue + # because the commit is identical, yet we need to perform it to + # seed the build cache. + branches: + - main jobs: tests: From bbbeabf98ca8805086d2180c94f6ea29eb12a2ac Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 5 Sep 2023 13:08:25 +0200 Subject: [PATCH 097/139] Add support for ordering of input prompts (#662) ## Changes JSON schema properties are a map and thus unordered. This PR introduces a JSON schema extension field called `order` to allow template authors to define the order in which template variables should be resolved/prompted. ## Tests Unit tests. --------- Co-authored-by: Pieter Noordhuis --- libs/jsonschema/extension.go | 14 +++++++ libs/jsonschema/schema.go | 3 ++ libs/jsonschema/schema_order.go | 57 ++++++++++++++++++++++++++ libs/jsonschema/schema_order_test.go | 60 ++++++++++++++++++++++++++++ libs/template/config.go | 5 ++- 5 files changed, 138 insertions(+), 1 deletion(-) create mode 100644 libs/jsonschema/extension.go create mode 100644 libs/jsonschema/schema_order.go create mode 100644 libs/jsonschema/schema_order_test.go diff --git a/libs/jsonschema/extension.go b/libs/jsonschema/extension.go new file mode 100644 index 000000000..bbbde695b --- /dev/null +++ b/libs/jsonschema/extension.go @@ -0,0 +1,14 @@ +package jsonschema + +// Extension defines our custom JSON schema extensions. +// +// JSON schema supports custom extensions through vocabularies: +// https://json-schema.org/understanding-json-schema/reference/schema.html#vocabularies. +// We don't (yet?) define a meta-schema for the extensions below. +// It's not a big issue because the reach/scope of these extensions is limited. +type Extension struct { + // Order defines the order of a field with respect to other fields. 
+ // If not defined, the field is ordered alphabetically after all fields + // that do have an order defined. + Order *int `json:"order,omitempty"` +} diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index c0d1736c1..87e9acd56 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -40,6 +40,9 @@ type Schema struct { // Default value for the property / object Default any `json:"default,omitempty"` + + // Extension embeds our custom JSON schema extensions. + Extension } type Type string diff --git a/libs/jsonschema/schema_order.go b/libs/jsonschema/schema_order.go new file mode 100644 index 000000000..3bc3e7d00 --- /dev/null +++ b/libs/jsonschema/schema_order.go @@ -0,0 +1,57 @@ +package jsonschema + +import ( + "slices" + "strings" +) + +// Property defines a single property of a struct schema. +// This type is not used in the schema itself but rather to +// return the pair of a property name and its schema. +type Property struct { + Name string + Schema *Schema +} + +// OrderedProperties returns the properties of the schema ordered according +// to the value of their `order` extension. If this extension is not set, the +// properties are ordered alphabetically. +func (s *Schema) OrderedProperties() []Property { + order := make(map[string]*int) + out := make([]Property, 0, len(s.Properties)) + for key, property := range s.Properties { + order[key] = property.Order + out = append(out, Property{ + Name: key, + Schema: property, + }) + } + + // Sort the properties by order and then by name. + slices.SortFunc(out, func(a, b Property) int { + oa := order[a.Name] + ob := order[b.Name] + cmp := 0 + switch { + case oa != nil && ob != nil: + // Compare the order values if both are set. + cmp = *oa - *ob + case oa == nil && ob != nil: + // If only one is set, the one that is set comes first. + cmp = 1 + case oa != nil && ob == nil: + // If only one is set, the one that is set comes first. 
+ cmp = -1 + } + + // If we have a non-zero comparison, return it. + if cmp != 0 { + return cmp + } + + // If the order is the same, compare by name. + return strings.Compare(a.Name, b.Name) + }) + + return out +} diff --git a/libs/jsonschema/schema_order_test.go b/libs/jsonschema/schema_order_test.go new file mode 100644 index 000000000..56d4d6355 --- /dev/null +++ b/libs/jsonschema/schema_order_test.go @@ -0,0 +1,60 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestOrderedProperties(t *testing.T) { + newInt := func(i int) *int { + return &i + } + + s := Schema{ + Properties: map[string]*Schema{ + "bbb": { + Type: StringType, + }, + "ccc": { + Type: StringType, + }, + "ddd": { + Type: StringType, + }, + "zzz1": { + Type: StringType, + Extension: Extension{ + Order: newInt(-1), + }, + }, + "zzz2": { + Type: StringType, + Extension: Extension{ + Order: newInt(-2), + }, + }, + "aaa1": { + Type: StringType, + Extension: Extension{ + Order: newInt(1), + }, + }, + "aaa2": { + Type: StringType, + Extension: Extension{ + Order: newInt(2), + }, + }, + }, + } + + // Test that the properties are ordered by order and then by name. 
+ properties := s.OrderedProperties() + names := make([]string, len(properties)) + for i, property := range properties { + names[i] = property.Name + } + + assert.Equal(t, []string{"zzz2", "zzz1", "aaa1", "aaa2", "bbb", "ccc", "ddd"}, names) +} diff --git a/libs/template/config.go b/libs/template/config.go index 302a13619..8a1ed6c82 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -117,7 +117,10 @@ func (c *config) assignDefaultValues() error { // Prompts user for values for properties that do not have a value set yet func (c *config) promptForValues() error { - for name, property := range c.schema.Properties { + for _, p := range c.schema.OrderedProperties() { + name := p.Name + property := p.Schema + // Config already has a value assigned if _, ok := c.values[name]; ok { continue From f62def3e77459cd0717d1ad04192c3162932930d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 5 Sep 2023 13:10:37 +0200 Subject: [PATCH 098/139] Replace API call to test configuration with dummy authenticate call (#728) ## Changes This reduces the latency of every workspace command by the duration of a single API call to retrieve the current user (which can take up to a full second). Note: the better place to verify that a request can be authenticated is the SDK itself. 
## Tests * Unit test to confirm the empty `*http.Request` can be constructed * Manually confirmed that the additional API call no longer happens --- cmd/root/auth.go | 26 +++++++++++++------------- cmd/root/auth_test.go | 14 ++++++++++++++ 2 files changed, 27 insertions(+), 13 deletions(-) create mode 100644 cmd/root/auth_test.go diff --git a/cmd/root/auth.go b/cmd/root/auth.go index e56074ef4..d4c9a31b9 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "net/http" "os" "github.com/databricks/cli/bundle" @@ -11,7 +12,6 @@ import ( "github.com/databricks/cli/libs/databrickscfg" "github.com/databricks/databricks-sdk-go" "github.com/databricks/databricks-sdk-go/config" - "github.com/databricks/databricks-sdk-go/service/iam" "github.com/manifoldco/promptui" "github.com/spf13/cobra" ) @@ -19,7 +19,6 @@ import ( // Placeholders to use as unique keys in context.Context. var workspaceClient int var accountClient int -var currentUser int func initProfileFlag(cmd *cobra.Command) { cmd.PersistentFlags().StringP("profile", "p", "", "~/.databrickscfg profile") @@ -94,8 +93,7 @@ TRY_AUTH: // or try picking a config profile dynamically if err != nil { return err } - // get current user identity also to verify validity of configuration - me, err := w.CurrentUser.Me(ctx) + err = w.Config.Authenticate(emptyHttpRequest(ctx)) if cmdio.IsInteractive(ctx) && errors.Is(err, config.ErrCannotConfigureAuth) { profile, err := askForWorkspaceProfile() if err != nil { @@ -107,7 +105,6 @@ TRY_AUTH: // or try picking a config profile dynamically if err != nil { return err } - ctx = context.WithValue(ctx, &currentUser, me) ctx = context.WithValue(ctx, &workspaceClient, w) cmd.SetContext(ctx) return nil @@ -194,6 +191,17 @@ func askForAccountProfile() (string, error) { return profiles[i].Name, nil } +// To verify that a client is configured correctly, we pass an empty HTTP request +// to a client's `config.Authenticate` function. 
Note: this functionality +// should be supported by the SDK itself. +func emptyHttpRequest(ctx context.Context) *http.Request { + req, err := http.NewRequestWithContext(ctx, "", "", nil) + if err != nil { + panic(err) + } + return req +} + func WorkspaceClient(ctx context.Context) *databricks.WorkspaceClient { w, ok := ctx.Value(&workspaceClient).(*databricks.WorkspaceClient) if !ok { @@ -209,11 +217,3 @@ func AccountClient(ctx context.Context) *databricks.AccountClient { } return a } - -func Me(ctx context.Context) *iam.User { - me, ok := ctx.Value(&currentUser).(*iam.User) - if !ok { - panic("cannot get current user. Please report it as a bug") - } - return me -} diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go new file mode 100644 index 000000000..75d255b58 --- /dev/null +++ b/cmd/root/auth_test.go @@ -0,0 +1,14 @@ +package root + +import ( + "context" + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestEmptyHttpRequest(t *testing.T) { + ctx, _ := context.WithCancel(context.Background()) + req := emptyHttpRequest(ctx) + assert.Equal(t, req.Context(), ctx) +} From 947d5b1e5c45a523d3e66f33982361bae3f75c62 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Tue, 5 Sep 2023 04:20:55 -0700 Subject: [PATCH 099/139] Fix IsServicePrincipal() only working for workspace admins (#732) ## Changes The latest rendition of isServicePrincipal no longer worked for non-admin users as it used the "principals get" API. This new version relies on the property that service principals always have a UUID as their userName. This was tested with the eng-jaws principal (8b948b2e-d2b5-4b9e-8274-11b596f3b652). 
--- bundle/config/mutator/process_target_mode.go | 5 +---- libs/auth/service_principal.go | 21 ++++++++------------ libs/auth/service_principal_test.go | 19 ++++++++++++++++++ libs/template/helpers.go | 5 +---- 4 files changed, 29 insertions(+), 21 deletions(-) create mode 100644 libs/auth/service_principal_test.go diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index be93512bb..06ae7b858 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -160,10 +160,7 @@ func (m *processTargetMode) Apply(ctx context.Context, b *bundle.Bundle) error { } return transformDevelopmentMode(b) case config.Production: - isPrincipal, err := auth.IsServicePrincipal(ctx, b.WorkspaceClient(), b.Config.Workspace.CurrentUser.Id) - if err != nil { - return err - } + isPrincipal := auth.IsServicePrincipal(b.Config.Workspace.CurrentUser.Id) return validateProductionMode(ctx, b, isPrincipal) case "": // No action diff --git a/libs/auth/service_principal.go b/libs/auth/service_principal.go index a6740b503..cb488d16e 100644 --- a/libs/auth/service_principal.go +++ b/libs/auth/service_principal.go @@ -1,20 +1,15 @@ package auth import ( - "context" - - "github.com/databricks/databricks-sdk-go" - "github.com/databricks/databricks-sdk-go/apierr" + "github.com/google/uuid" ) // Determines whether a given user id is a service principal. -// This function uses a heuristic: if no user exists with this id, we assume -// it's a service principal. Unfortunately, the standard service principal API is too -// slow for our purposes. -func IsServicePrincipal(ctx context.Context, ws *databricks.WorkspaceClient, userId string) (bool, error) { - _, err := ws.Users.GetById(ctx, userId) - if apierr.IsMissing(err) { - return true, nil - } - return false, err +// This function uses a heuristic: if the user id is a UUID, then we assume +// it's a service principal. 
Unfortunately, the service principal listing API is too +// slow for our purposes. And the "users" and "service principals get" APIs +// only allow access by workspace admins. +func IsServicePrincipal(userId string) bool { + _, err := uuid.Parse(userId) + return err == nil } diff --git a/libs/auth/service_principal_test.go b/libs/auth/service_principal_test.go new file mode 100644 index 000000000..95e8ab5cb --- /dev/null +++ b/libs/auth/service_principal_test.go @@ -0,0 +1,19 @@ +package auth + +import ( + "testing" + + "github.com/stretchr/testify/assert" +) + +func TestIsServicePrincipal_ValidUUID(t *testing.T) { + userId := "8b948b2e-d2b5-4b9e-8274-11b596f3b652" + isSP := IsServicePrincipal(userId) + assert.True(t, isSP, "Expected user ID to be recognized as a service principal") +} + +func TestIsServicePrincipal_InvalidUUID(t *testing.T) { + userId := "invalid" + isSP := IsServicePrincipal(userId) + assert.False(t, isSP, "Expected user ID to not be recognized as a service principal") +} diff --git a/libs/template/helpers.go b/libs/template/helpers.go index f947d9ba8..29abbe21c 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -104,10 +104,7 @@ func loadHelpers(ctx context.Context) template.FuncMap { return false, err } } - result, err := auth.IsServicePrincipal(ctx, w, user.Id) - if err != nil { - return false, err - } + result := auth.IsServicePrincipal(user.Id) is_service_principal = &result return result, nil }, From 8c2cc07f7b3649df0cebb539e8fec81fccc07ed5 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Tue, 5 Sep 2023 04:58:34 -0700 Subject: [PATCH 100/139] databricks bundle init template v1 (#686) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes This adds a built-in "default-python" template to the CLI. This is based on the new default-template support of https://github.com/databricks/cli/pull/685. 
The goal here is to offer an experience where customers can simply type `databricks bundle init` to get a default template: ``` $ databricks bundle init Template to use [default-python]: default-python Unique name for this project [my_project]: my_project ✨ Successfully initialized template ``` The present template: - [x] Works well with VS Code - [x] Works well with the workspace - [x] Works well with DB Connect - [x] Uses minimal stubs rather than boiler-plate-heavy examples I'll have a followup with tests + DLT support. --------- Co-authored-by: Andrew Nester Co-authored-by: PaulCornellDB Co-authored-by: Pieter Noordhuis --- cmd/bundle/init.go | 2 +- .../databricks_template_schema.json | 2 +- .../template/{{.project_name}}/.gitignore | 9 +++ .../.vscode/__builtins__.pyi | 3 + .../{{.project_name}}/.vscode/extensions.json | 7 ++ .../{{.project_name}}/.vscode/settings.json | 14 ++++ .../template/{{.project_name}}/README.md | 3 - .../template/{{.project_name}}/README.md.tmpl | 37 +++++++++++ .../{{.project_name}}/databricks.yml.tmpl | 52 +++++++++++++++ .../{{.project_name}}/fixtures/.gitkeep.tmpl | 27 ++++++++ .../template/{{.project_name}}/pytest.ini | 3 + .../resources/{{.project_name}}_job.yml.tmpl | 42 ++++++++++++ .../{{.project_name}}/scratch/README.md | 4 ++ .../scratch/exploration.ipynb | 50 ++++++++++++++ .../template/{{.project_name}}/setup.py.tmpl | 24 +++++++ .../{{.project_name}}/src/notebook.ipynb.tmpl | 65 +++++++++++++++++++ .../src/{{.project_name}}/__init__.py | 1 + .../src/{{.project_name}}/main.py.tmpl | 16 +++++ .../{{.project_name}}/tests/main_test.py.tmpl | 5 ++ 19 files changed, 361 insertions(+), 5 deletions(-) create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/.gitignore create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json create mode 100644 
libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json delete mode 100644 libs/template/templates/default-python/template/{{.project_name}}/README.md create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/pytest.ini create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index 2127a7bc4..bf68e921c 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -59,7 +59,7 @@ func newInitCommand() *cobra.Command { } else { return errors.New("please specify a template") - /* TODO: propose to use default-python (once template is ready) + /* TODO: propose to use default-python (once #708 is merged) var err error if !cmdio.IsOutTTY(ctx) || !cmdio.IsInTTY(ctx) { return errors.New("please specify a template") diff --git 
a/libs/template/templates/default-python/databricks_template_schema.json b/libs/template/templates/default-python/databricks_template_schema.json index b680c5fbb..3220e9a6f 100644 --- a/libs/template/templates/default-python/databricks_template_schema.json +++ b/libs/template/templates/default-python/databricks_template_schema.json @@ -3,7 +3,7 @@ "project_name": { "type": "string", "default": "my_project", - "description": "Name of the directory" + "description": "Unique name for this project" } } } diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.gitignore b/libs/template/templates/default-python/template/{{.project_name}}/.gitignore new file mode 100644 index 000000000..aa87f0198 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.gitignore @@ -0,0 +1,9 @@ + +.databricks/ +build/ +dist/ +__pycache__/ +*.egg-info +.venv/ +scratch/** +!scratch/README.md diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi new file mode 100644 index 000000000..0edd5181b --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/__builtins__.pyi @@ -0,0 +1,3 @@ +# Typings for Pylance in Visual Studio Code +# see https://github.com/microsoft/pyright/blob/main/docs/builtins.md +from databricks.sdk.runtime import * diff --git a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json new file mode 100644 index 000000000..5d15eba36 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "databricks.databricks", + "ms-python.vscode-pylance", + "redhat.vscode-yaml" + ] +} diff --git 
a/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json new file mode 100644 index 000000000..16cb2c96a --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/.vscode/settings.json @@ -0,0 +1,14 @@ +{ + "python.analysis.stubPath": ".vscode", + "databricks.python.envFile": "${workspaceFolder}/.env", + "jupyter.interactiveWindow.cellMarker.codeRegex": "^# COMMAND ----------|^# Databricks notebook source|^(#\\s*%%|#\\s*\\|#\\s*In\\[\\d*?\\]|#\\s*In\\[ \\])", + "jupyter.interactiveWindow.cellMarker.default": "# COMMAND ----------", + "python.testing.pytestArgs": [ + "." + ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "files.exclude": { + "**/*.egg-info": true + }, +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md b/libs/template/templates/default-python/template/{{.project_name}}/README.md deleted file mode 100644 index 3187b9ed0..000000000 --- a/libs/template/templates/default-python/template/{{.project_name}}/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# {{.project_name}} - -The '{{.project_name}}' bundle was generated using the default-python template. diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl new file mode 100644 index 000000000..4c89435b4 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl @@ -0,0 +1,37 @@ +# {{.project_name}} + +The '{{.project_name}}' project was generated by using the default-python template. + +## Getting started + +1. Install the Databricks CLI from https://docs.databricks.com/dev-tools/cli/databricks-cli.html + +2. Authenticate to your Databricks workspace: + ``` + $ databricks configure + ``` + +3. 
To deploy a development copy of this project, type: + ``` + $ databricks bundle deploy --target dev + ``` + (Note that "dev" is the default target, so the `--target` parameter + is optional here.) + + This deploys everything that's defined for this project. + For example, the default template would deploy a job called + `[dev yourname] {{.project_name}}-job` to your workspace. + You can find that job by opening your workpace and clicking on **Workflows**. + +4. Similarly, to deploy a production copy, type: + ``` + $ databricks bundle deploy --target prod + ``` + +5. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from + https://docs.databricks.com/dev-tools/vscode-ext.html. Or read the "getting started" documentation for + **Databricks Connect** for instructions on running the included Python code from a different IDE. + +6. For documentation on the Databricks asset bundles format used + for this project, and for CI/CD configuration, see + https://docs.databricks.com/dev-tools/bundles/index.html. diff --git a/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl new file mode 100644 index 000000000..48aef0ea3 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl @@ -0,0 +1,52 @@ +# This is a Databricks asset bundle definition for {{.project_name}}. +# See https://docs.databricks.com/dev-tools/bundles/index.html for documentation. +bundle: + name: {{.project_name}} + +include: + - resources/*.yml + +targets: + # The 'dev' target, used development purposes. + # Whenever a developer deploys using 'dev', they get their own copy. + dev: + # We use 'mode: development' to make everything deployed to this target gets a prefix + # like '[dev my_user_name]'. 
Setting this mode also disables any schedules and + # automatic triggers for jobs and enables the 'development' mode for Delta Live Tables pipelines. + mode: development + default: true + workspace: + host: {{workspace_host}} + + # Optionally, there could be a 'staging' target here. + # (See Databricks docs on CI/CD at https://docs.databricks.com/dev-tools/bundles/index.html.) + # + # staging: + # workspace: + # host: {{workspace_host}} + + # The 'prod' target, used for production deployment. + prod: + # For production deployments, we only have a single copy, so we override the + # workspace.root_path default of + # /Users/${workspace.current_user.userName}/.bundle/${bundle.target}/${bundle.name} + # to a path that is not specific to the current user. + {{- /* + Explaining 'mode: production' isn't as pressing as explaining 'mode: development'. + As we already talked about the other mode above, users can just + look at documentation or ask the assistant about 'mode: production'. + # + # By making use of 'mode: production' we enable strict checks + # to make sure we have correctly configured this target. + */}} + mode: production + workspace: + host: {{workspace_host}} + root_path: /Shared/.bundle/prod/${bundle.name} + {{- if not is_service_principal}} + run_as: + # This runs as {{user_name}} in production. Alternatively, + # a service principal could be used here using service_principal_name + # (see Databricks documentation). + user_name: {{user_name}} + {{end -}} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl new file mode 100644 index 000000000..361c681f9 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl @@ -0,0 +1,27 @@ +# Fixtures +{{- /* +We don't want to have too many README.md files, since they +stand out so much. 
But we do need to have a file here to make +sure the folder is added to Git. +*/}} + +This folder is reserved for fixtures, such as CSV files. + +Below is an example of how to load fixtures as a data frame: + +``` +import pandas as pd +import os + +def get_absolute_path(*relative_parts): + if 'dbutils' in globals(): + base_dir = os.path.dirname(dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()) # type: ignore + path = os.path.normpath(os.path.join(base_dir, *relative_parts)) + return path if path.startswith("/Workspace") else os.path.join("/Workspace", path) + else: + return os.path.join(*relative_parts) + +csv_file = get_absolute_path("..", "fixtures", "mycsv.csv") +df = pd.read_csv(csv_file) +display(df) +``` diff --git a/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini b/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini new file mode 100644 index 000000000..80432c220 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +testpaths = tests +pythonpath = src diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl new file mode 100644 index 000000000..f8116cdfc --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl @@ -0,0 +1,42 @@ +# The main job for {{.project_name}} +resources: + + jobs: + {{.project_name}}_job: + name: {{.project_name}}_job + + schedule: + quartz_cron_expression: '44 37 8 * * ?' 
+ timezone_id: Europe/Amsterdam + + {{- if not is_service_principal}} + email_notifications: + on_failure: + - {{user_name}} + {{end -}} + + tasks: + - task_key: notebook_task + job_cluster_key: job_cluster + notebook_task: + notebook_path: ../src/notebook.ipynb + + - task_key: python_wheel_task + depends_on: + - task_key: notebook_task + job_cluster_key: job_cluster + python_wheel_task: + package_name: {{.project_name}} + entry_point: main + libraries: + - whl: ../dist/*.whl + + job_clusters: + - job_cluster_key: job_cluster + new_cluster: + {{- /* we should always use an LTS version in our templates */}} + spark_version: 13.3.x-scala2.12 + node_type_id: {{smallest_node_type}} + autoscale: + min_workers: 1 + max_workers: 4 diff --git a/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md b/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md new file mode 100644 index 000000000..e6cfb81b4 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/scratch/README.md @@ -0,0 +1,4 @@ +# scratch + +This folder is reserved for personal, exploratory notebooks. +By default these are not committed to Git, as 'scratch' is listed in .gitignore. 
diff --git a/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb new file mode 100644 index 000000000..2ee36c3c1 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb @@ -0,0 +1,50 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": { + "byteLimit": 2048000, + "rowLimit": 10000 + }, + "inputWidgets": {}, + "nuid": "6bca260b-13d1-448f-8082-30b60a85c9ae", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "import sys\n", + "sys.path.append('../src')\n", + "from project import main\n", + "\n", + "main.taxis.show(10)" + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "ipynb-notebook", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl new file mode 100644 index 000000000..93f4e9ff9 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl @@ -0,0 +1,24 @@ +""" +Setup script for {{.project_name}}. + +This script packages and distributes the associated wheel file(s). +Source code is in ./src/. Run 'python setup.py sdist bdist_wheel' to build. 
+""" +from setuptools import setup, find_packages + +import sys +sys.path.append('./src') + +import {{.project_name}} + +setup( + name="{{.project_name}}", + version={{.project_name}}.__version__, + url="https://databricks.com", + author="{{.user_name}}", + description="my test wheel", + packages=find_packages(where='./src'), + package_dir={'': 'src'}, + entry_points={"entry_points": "main={{.project_name}}.main:main"}, + install_requires=["setuptools"], +) diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl new file mode 100644 index 000000000..26c743032 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl @@ -0,0 +1,65 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "ee353e42-ff58-4955-9608-12865bd0950e", + "showTitle": false, + "title": "" + } + }, + "source": [ + "# Default notebook\n", + "\n", + "This default notebook is executed using Databricks Workflows as defined in resources/{{.my_project}}_job.yml." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": { + "byteLimit": 2048000, + "rowLimit": 10000 + }, + "inputWidgets": {}, + "nuid": "6bca260b-13d1-448f-8082-30b60a85c9ae", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + "from {{.project_name}} import main\n", + "\n", + "main.get_taxis().show(10)\n" + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "notebook", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py new file mode 100644 index 000000000..f102a9cad --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.1" diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl new file mode 100644 index 000000000..4fe5ac8f4 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/{{.project_name}}/main.py.tmpl @@ -0,0 +1,16 @@ +{{- /* +We use pyspark.sql rather than DatabricksSession.builder.getOrCreate() +for compatibility with older runtimes. With a new runtime, it's +equivalent to DatabricksSession.builder.getOrCreate(). 
+*/ -}} +from pyspark.sql import SparkSession + +def get_taxis(): + spark = SparkSession.builder.getOrCreate() + return spark.read.table("samples.nyctaxi.trips") + +def main(): + get_taxis().show(5) + +if __name__ == '__main__': + main() diff --git a/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl new file mode 100644 index 000000000..92afccc6c --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl @@ -0,0 +1,5 @@ +from {{.project_name}} import main + +def test_main(): + taxis = main.get_taxis() + assert taxis.count() == 5 From e533f9109a04295b50b2bc032f4f934e6bb25ead Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Tue, 5 Sep 2023 06:57:01 -0700 Subject: [PATCH 101/139] Show 'databricks bundle init' template in CLI prompt (#725) ~(this should be changed to target `main`)~ This reveals the template from https://github.com/databricks/cli/pull/686 in CLI prompts for once #686 and #708 are merged. 
--------- Co-authored-by: Andrew Nester Co-authored-by: PaulCornellDB Co-authored-by: Pieter Noordhuis --- cmd/bundle/init.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index bf68e921c..9a11eb257 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/template" "github.com/spf13/cobra" @@ -57,9 +58,6 @@ func newInitCommand() *cobra.Command { if len(args) > 0 { templatePath = args[0] } else { - return errors.New("please specify a template") - - /* TODO: propose to use default-python (once #708 is merged) var err error if !cmdio.IsOutTTY(ctx) || !cmdio.IsInTTY(ctx) { return errors.New("please specify a template") @@ -68,7 +66,6 @@ func newInitCommand() *cobra.Command { if err != nil { return err } - */ } if !isRepoUrl(templatePath) { From 9194418ac16310bc24ac25f90845af338f4518bd Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 5 Sep 2023 16:25:26 +0200 Subject: [PATCH 102/139] Fix regex error check in mkdir integration test (#735) ## Changes Fixes test for all cloud provider after the Go SDK bump which introduces the `non retryable error` prefix to errors. The test passes now. --- internal/fs_mkdir_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/fs_mkdir_test.go b/internal/fs_mkdir_test.go index b743ebb79..25117d532 100644 --- a/internal/fs_mkdir_test.go +++ b/internal/fs_mkdir_test.go @@ -112,6 +112,6 @@ func TestAccFsMkdirWhenFileExistsAtPath(t *testing.T) { // assert run fails _, _, err = RequireErrorRun(t, "fs", "mkdir", "dbfs:"+path.Join(tmpDir, "hello")) // Different cloud providers return different errors. 
- regex := regexp.MustCompile(`(^|: )Path is a file: .*$|^Cannot create directory .* because .* is an existing file\.$|^mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) + regex := regexp.MustCompile(`(^|: )Path is a file: .*$|(^|: )Cannot create directory .* because .* is an existing file\.$|(^|: )mkdirs\(hadoopPath: .*, permission: rwxrwxrwx\): failed$`) assert.Regexp(t, regex, err.Error()) } From fabe8e88b8abca3993d40148c966df8d9e924318 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 6 Sep 2023 09:54:35 +0200 Subject: [PATCH 103/139] Include $PATH in set of environment variables to pass along. (#736) ## Changes This is necessary to ensure that our Terraform provider can use the same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI. ## Tests Unit test and manual verification. --- bundle/deploy/terraform/init.go | 8 ++++++++ bundle/deploy/terraform/init_test.go | 2 ++ 2 files changed, 10 insertions(+) diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 6df7b8d48..878c4e8b2 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -78,6 +78,14 @@ func inheritEnvVars(env map[string]string) error { env["HOME"] = home } + // Include $PATH in set of environment variables to pass along. + // This is necessary to ensure that our Terraform provider can use the + // same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI. + path, ok := os.LookupEnv("PATH") + if ok { + env["PATH"] = path + } + // Include $TF_CLI_CONFIG_FILE to override terraform provider in development. 
configFile, ok := os.LookupEnv("TF_CLI_CONFIG_FILE") if ok { diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index 5bb5929e6..b94593878 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -277,6 +277,7 @@ func TestInheritEnvVars(t *testing.T) { env := map[string]string{} t.Setenv("HOME", "/home/testuser") + t.Setenv("PATH", "/foo:/bar") t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc") err := inheritEnvVars(env) @@ -285,6 +286,7 @@ func TestInheritEnvVars(t *testing.T) { require.Equal(t, map[string]string{ "HOME": "/home/testuser", + "PATH": "/foo:/bar", "TF_CLI_CONFIG_FILE": "/tmp/config.tfrc", }, env) } From a41b9e8bf2aa0a25898e48cf25f24518d33a5c84 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Wed, 6 Sep 2023 10:41:47 +0200 Subject: [PATCH 104/139] Added description for version command (#737) ## Changes Added description for version command ## Tests ``` databricks help ... Additional Commands: account Databricks Account Commands api Perform Databricks API call auth Authentication related commands bundle Databricks Asset Bundles completion Generate the autocompletion script for the specified shell fs Filesystem related commands help Help about any command sync Synchronize a local directory to a workspace directory version Retrieve information about the current version of CLI ``` --------- Co-authored-by: Pieter Noordhuis --- cmd/version/version.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/version/version.go b/cmd/version/version.go index 17bb4b9af..653fbb897 100644 --- a/cmd/version/version.go +++ b/cmd/version/version.go @@ -8,9 +8,9 @@ import ( func New() *cobra.Command { cmd := &cobra.Command{ - Use: "version", - Args: cobra.NoArgs, - + Use: "version", + Args: cobra.NoArgs, + Short: "Retrieve information about the current version of this CLI", Annotations: map[string]string{ "template": "Databricks CLI v{{.Version}}\n", }, From 
f9e521b43e1e19b5ae52ca1c512f6690204e8b2a Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Wed, 6 Sep 2023 11:52:31 +0200 Subject: [PATCH 105/139] databricks bundle init template v2: optional stubs, DLT support (#700) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ## Changes This follows up on https://github.com/databricks/cli/pull/686. This PR makes our stubs optional + it adds DLT stubs: ``` $ databricks bundle init Template to use [default-python]: default-python Unique name for this project [my_project]: my_project Include a stub (sample) notebook in 'my_project/src' [yes]: yes Include a stub (sample) DLT pipeline in 'my_project/src' [yes]: yes Include a stub (sample) Python package 'my_project/src' [yes]: yes ✨ Successfully initialized template ``` ## Tests Manual testing, matrix tests. --------- Co-authored-by: Andrew Nester Co-authored-by: PaulCornellDB Co-authored-by: Pieter Noordhuis --- bundle/bundle.go | 4 + .../config/mutator/populate_current_user.go | 4 + bundle/deploy/terraform/apply.go | 4 + bundle/deploy/terraform/convert.go | 9 +- bundle/deploy/terraform/convert_test.go | 18 +-- bundle/deploy/terraform/write.go | 3 +- libs/template/helpers.go | 25 ++-- libs/template/renderer.go | 16 ++- libs/template/renderer_test.go | 96 +++++++++++++++ .../databricks_template_schema.json | 27 ++++- .../templates/default-python/defaults.json | 5 +- .../default-python/template/__preamble.tmpl | 38 ++++++ .../template/{{.project_name}}/README.md.tmpl | 15 ++- .../{{.project_name}}/resources/.gitkeep | 1 + .../resources/{{.project_name}}_job.yml.tmpl | 28 ++++- .../{{.project_name}}_pipeline.yml.tmpl | 12 ++ ...ploration.ipynb => exploration.ipynb.tmpl} | 8 +- .../src/dlt_pipeline.ipynb.tmpl | 112 ++++++++++++++++++ .../{{.project_name}}/src/notebook.ipynb.tmpl | 6 +- .../{{.project_name}}/tests/main_test.py.tmpl | 2 +- 20 files changed, 393 insertions(+), 40 deletions(-) create mode 100644 
libs/template/templates/default-python/template/__preamble.tmpl create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl rename libs/template/templates/default-python/template/{{.project_name}}/scratch/{exploration.ipynb => exploration.ipynb.tmpl} (84%) create mode 100644 libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl diff --git a/bundle/bundle.go b/bundle/bundle.go index d69d58158..8175ce283 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -37,6 +37,10 @@ type Bundle struct { // Stores an initialized copy of this bundle's Terraform wrapper. Terraform *tfexec.Terraform + // Indicates that the Terraform definition based on this bundle is empty, + // i.e. that it would deploy no resources. + TerraformHasNoResources bool + // Stores the locker responsible for acquiring/releasing a deployment lock. 
Locker *locker.Locker diff --git a/bundle/config/mutator/populate_current_user.go b/bundle/config/mutator/populate_current_user.go index cbaa2d30b..bba0457c4 100644 --- a/bundle/config/mutator/populate_current_user.go +++ b/bundle/config/mutator/populate_current_user.go @@ -21,6 +21,10 @@ func (m *populateCurrentUser) Name() string { } func (m *populateCurrentUser) Apply(ctx context.Context, b *bundle.Bundle) error { + if b.Config.Workspace.CurrentUser != nil { + return nil + } + w := b.WorkspaceClient() me, err := w.CurrentUser.Me(ctx) if err != nil { diff --git a/bundle/deploy/terraform/apply.go b/bundle/deploy/terraform/apply.go index ab868f765..53cffbbaf 100644 --- a/bundle/deploy/terraform/apply.go +++ b/bundle/deploy/terraform/apply.go @@ -16,6 +16,10 @@ func (w *apply) Name() string { } func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) error { + if b.TerraformHasNoResources { + cmdio.LogString(ctx, "Note: there are no resources to deploy for this bundle") + return nil + } tf := b.Terraform if tf == nil { return fmt.Errorf("terraform not initialized") diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index ac68bd359..41bde91d8 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -49,12 +49,14 @@ func convPermission(ac resources.Permission) schema.ResourcePermissionsAccessCon // // NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO // CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT. 
-func BundleToTerraform(config *config.Root) *schema.Root { +func BundleToTerraform(config *config.Root) (*schema.Root, bool) { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() tfroot.Resource = schema.NewResources() + noResources := true for k, src := range config.Resources.Jobs { + noResources = false var dst schema.ResourceJob conv(src, &dst) @@ -100,6 +102,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Pipelines { + noResources = false var dst schema.ResourcePipeline conv(src, &dst) @@ -127,6 +130,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Models { + noResources = false var dst schema.ResourceMlflowModel conv(src, &dst) tfroot.Resource.MlflowModel[k] = &dst @@ -139,6 +143,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } for k, src := range config.Resources.Experiments { + noResources = false var dst schema.ResourceMlflowExperiment conv(src, &dst) tfroot.Resource.MlflowExperiment[k] = &dst @@ -150,7 +155,7 @@ func BundleToTerraform(config *config.Root) *schema.Root { } } - return tfroot + return tfroot, noResources } func TerraformToBundle(state *tfjson.State, config *config.Root) error { diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index c47824ec5..4d912fbe0 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -40,7 +40,7 @@ func TestConvertJob(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) assert.Len(t, out.Resource.Job["my_job"].JobCluster, 1) assert.Equal(t, "https://github.com/foo/bar", out.Resource.Job["my_job"].GitSource.Url) @@ -65,7 +65,7 @@ func TestConvertJobPermissions(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.NotEmpty(t, 
out.Resource.Permissions["job_my_job"].JobId) assert.Len(t, out.Resource.Permissions["job_my_job"].AccessControl, 1) @@ -101,7 +101,7 @@ func TestConvertJobTaskLibraries(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) require.Len(t, out.Resource.Job["my_job"].Task, 1) require.Len(t, out.Resource.Job["my_job"].Task[0].Library, 1) @@ -135,7 +135,7 @@ func TestConvertPipeline(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.Equal(t, "my pipeline", out.Resource.Pipeline["my_pipeline"].Name) assert.Len(t, out.Resource.Pipeline["my_pipeline"].Library, 2) assert.Nil(t, out.Data) @@ -159,7 +159,7 @@ func TestConvertPipelinePermissions(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["pipeline_my_pipeline"].PipelineId) assert.Len(t, out.Resource.Permissions["pipeline_my_pipeline"].AccessControl, 1) @@ -194,7 +194,7 @@ func TestConvertModel(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.Equal(t, "name", out.Resource.MlflowModel["my_model"].Name) assert.Equal(t, "description", out.Resource.MlflowModel["my_model"].Description) assert.Len(t, out.Resource.MlflowModel["my_model"].Tags, 2) @@ -223,7 +223,7 @@ func TestConvertModelPermissions(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["mlflow_model_my_model"].RegisteredModelId) assert.Len(t, out.Resource.Permissions["mlflow_model_my_model"].AccessControl, 1) @@ -247,7 +247,7 @@ func TestConvertExperiment(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.Equal(t, "name", out.Resource.MlflowExperiment["my_experiment"].Name) assert.Nil(t, out.Data) } @@ -270,7 +270,7 @@ func 
TestConvertExperimentPermissions(t *testing.T) { }, } - out := BundleToTerraform(&config) + out, _ := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["mlflow_experiment_my_experiment"].ExperimentId) assert.Len(t, out.Resource.Permissions["mlflow_experiment_my_experiment"].AccessControl, 1) diff --git a/bundle/deploy/terraform/write.go b/bundle/deploy/terraform/write.go index b40a70531..0bf9ab24a 100644 --- a/bundle/deploy/terraform/write.go +++ b/bundle/deploy/terraform/write.go @@ -21,7 +21,8 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - root := BundleToTerraform(&b.Config) + root, noResources := BundleToTerraform(&b.Config) + b.TerraformHasNoResources = noResources f, err := os.Create(filepath.Join(dir, "bundle.tf.json")) if err != nil { return err diff --git a/libs/template/helpers.go b/libs/template/helpers.go index 29abbe21c..317522703 100644 --- a/libs/template/helpers.go +++ b/libs/template/helpers.go @@ -26,9 +26,10 @@ type pair struct { v any } +var cachedUser *iam.User +var cachedIsServicePrincipal *bool + func loadHelpers(ctx context.Context) template.FuncMap { - var user *iam.User - var is_service_principal *bool w := root.WorkspaceClient(ctx) return template.FuncMap{ "fail": func(format string, args ...any) (any, error) { @@ -80,32 +81,32 @@ func loadHelpers(ctx context.Context) template.FuncMap { return w.Config.Host, nil }, "user_name": func() (string, error) { - if user == nil { + if cachedUser == nil { var err error - user, err = w.CurrentUser.Me(ctx) + cachedUser, err = w.CurrentUser.Me(ctx) if err != nil { return "", err } } - result := user.UserName + result := cachedUser.UserName if result == "" { - result = user.Id + result = cachedUser.Id } return result, nil }, "is_service_principal": func() (bool, error) { - if is_service_principal != nil { - return *is_service_principal, nil + if cachedIsServicePrincipal != nil { + return *cachedIsServicePrincipal, nil } - if user == nil { + 
if cachedUser == nil { var err error - user, err = w.CurrentUser.Me(ctx) + cachedUser, err = w.CurrentUser.Me(ctx) if err != nil { return false, err } } - result := auth.IsServicePrincipal(user.Id) - is_service_principal = &result + result := auth.IsServicePrincipal(cachedUser.Id) + cachedIsServicePrincipal = &result return result, nil }, } diff --git a/libs/template/renderer.go b/libs/template/renderer.go index f4bd99d2c..f674ea0fb 100644 --- a/libs/template/renderer.go +++ b/libs/template/renderer.go @@ -9,6 +9,7 @@ import ( "path" "path/filepath" "slices" + "sort" "strings" "text/template" @@ -214,17 +215,22 @@ func (r *renderer) walk() error { // Add skip function, which accumulates skip patterns relative to current // directory r.baseTemplate.Funcs(template.FuncMap{ - "skip": func(relPattern string) string { + "skip": func(relPattern string) (string, error) { // patterns are specified relative to current directory of the file // the {{skip}} function is called from. - pattern := path.Join(currentDirectory, relPattern) + patternRaw := path.Join(currentDirectory, relPattern) + pattern, err := r.executeTemplate(patternRaw) + if err != nil { + return "", err + } + if !slices.Contains(r.skipPatterns, pattern) { logger.Infof(r.ctx, "adding skip pattern: %s", pattern) r.skipPatterns = append(r.skipPatterns, pattern) } // return empty string will print nothing at function call site // when executing the template - return "" + return "", nil }, }) @@ -239,6 +245,10 @@ func (r *renderer) walk() error { if err != nil { return err } + // Sort by name to ensure deterministic ordering + sort.Slice(entries, func(i, j int) bool { + return entries[i].Name() < entries[j].Name() + }) for _, entry := range entries { if entry.IsDir() { // Add to slice, for BFS traversal diff --git a/libs/template/renderer_test.go b/libs/template/renderer_test.go index a2e5675e8..21dd1e4fa 100644 --- a/libs/template/renderer_test.go +++ b/libs/template/renderer_test.go @@ -12,7 +12,14 @@ import ( 
"testing" "text/template" + "github.com/databricks/cli/bundle" + bundleConfig "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/cmd/root" + "github.com/databricks/databricks-sdk-go" + workspaceConfig "github.com/databricks/databricks-sdk-go/config" + "github.com/databricks/databricks-sdk-go/service/iam" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -29,6 +36,95 @@ func assertFilePermissions(t *testing.T, path string, perm fs.FileMode) { assert.Equal(t, perm, info.Mode().Perm()) } +func assertBuiltinTemplateValid(t *testing.T, settings map[string]any, target string, isServicePrincipal bool, build bool, tempDir string) { + ctx := context.Background() + + templatePath, err := prepareBuiltinTemplates("default-python", tempDir) + require.NoError(t, err) + + w := &databricks.WorkspaceClient{ + Config: &workspaceConfig.Config{Host: "https://myhost.com"}, + } + + // Prepare helpers + cachedUser = &iam.User{UserName: "user@domain.com"} + cachedIsServicePrincipal = &isServicePrincipal + ctx = root.SetWorkspaceClient(ctx, w) + helpers := loadHelpers(ctx) + + renderer, err := newRenderer(ctx, settings, helpers, templatePath, "./testdata/template-in-path/library", tempDir) + require.NoError(t, err) + + // Evaluate template + err = renderer.walk() + require.NoError(t, err) + err = renderer.persistToDisk() + require.NoError(t, err) + b, err := bundle.Load(ctx, filepath.Join(tempDir, "template", "my_project")) + require.NoError(t, err) + + // Apply initialize / validation mutators + b.Config.Workspace.CurrentUser = &bundleConfig.User{User: cachedUser} + b.WorkspaceClient() + b.Config.Bundle.Terraform = &bundleConfig.Terraform{ + ExecPath: "sh", + } + err = bundle.Apply(ctx, b, bundle.Seq( + bundle.Seq(mutator.DefaultMutators()...), + mutator.SelectTarget(target), + phases.Initialize(), + )) + require.NoError(t, err) + + // Apply build 
mutator + if build { + err = bundle.Apply(ctx, b, phases.Build()) + require.NoError(t, err) + } +} + +func TestBuiltinTemplateValid(t *testing.T) { + // Test option combinations + options := []string{"yes", "no"} + isServicePrincipal := false + build := false + for _, includeNotebook := range options { + for _, includeDlt := range options { + for _, includePython := range options { + for _, isServicePrincipal := range []bool{true, false} { + config := map[string]any{ + "project_name": "my_project", + "include_notebook": includeNotebook, + "include_dlt": includeDlt, + "include_python": includePython, + } + tempDir := t.TempDir() + assertBuiltinTemplateValid(t, config, "dev", isServicePrincipal, build, tempDir) + } + } + } + } + + // Test prod mode + build + config := map[string]any{ + "project_name": "my_project", + "include_notebook": "yes", + "include_dlt": "yes", + "include_python": "yes", + } + isServicePrincipal = false + build = true + + // On Windows, we can't always remove the resulting temp dir since background + // processes might have it open, so we use 'defer' for a best-effort cleanup + tempDir, err := os.MkdirTemp("", "templates") + require.NoError(t, err) + defer os.RemoveAll(tempDir) + + assertBuiltinTemplateValid(t, config, "prod", isServicePrincipal, build, tempDir) + defer os.RemoveAll(tempDir) +} + func TestRendererWithAssociatedTemplateInLibrary(t *testing.T) { tmpDir := t.TempDir() diff --git a/libs/template/templates/default-python/databricks_template_schema.json b/libs/template/templates/default-python/databricks_template_schema.json index 3220e9a6f..22c65f309 100644 --- a/libs/template/templates/default-python/databricks_template_schema.json +++ b/libs/template/templates/default-python/databricks_template_schema.json @@ -3,7 +3,32 @@ "project_name": { "type": "string", "default": "my_project", - "description": "Unique name for this project" + "description": "Unique name for this project", + "order": 1 + }, + "include_notebook": { + "todo": 
"use an enum here, see https://github.com/databricks/cli/pull/668", + "type": "string", + "default": "yes", + "pattern": "^(yes|no)$", + "description": "Include a stub (sample) notebook in 'my_project/src'", + "order": 2 + }, + "include_dlt": { + "todo": "use an enum here, see https://github.com/databricks/cli/pull/668", + "type": "string", + "default": "yes", + "pattern": "^(yes|no)$", + "description": "Include a stub (sample) DLT pipeline in 'my_project/src'", + "order": 3 + }, + "include_python": { + "todo": "use an enum here, see https://github.com/databricks/cli/pull/668", + "type": "string", + "default": "yes", + "pattern": "^(yes|no)$", + "description": "Include a stub (sample) Python package 'my_project/src'", + "order": 4 } } } diff --git a/libs/template/templates/default-python/defaults.json b/libs/template/templates/default-python/defaults.json index 99ecd36d2..510ec4a3d 100644 --- a/libs/template/templates/default-python/defaults.json +++ b/libs/template/templates/default-python/defaults.json @@ -1,3 +1,6 @@ { - "project_name": "my_project" + "project_name": "my_project", + "include_notebook": "yes", + "include_dlt": "yes", + "include_python": "yes" } diff --git a/libs/template/templates/default-python/template/__preamble.tmpl b/libs/template/templates/default-python/template/__preamble.tmpl new file mode 100644 index 000000000..c018f2825 --- /dev/null +++ b/libs/template/templates/default-python/template/__preamble.tmpl @@ -0,0 +1,38 @@ +# Preamble + +This file only template directives; it is skipped for the actual output. + +{{skip "__preamble"}} + +{{ $value := .project_name }} +{{with (regexp "^[A-Za-z0-9_]*$")}} + {{if not (.MatchString $value)}} + {{fail "Invalid project_name: %s. Must consist of letter and underscores only." 
$value}} + {{end}} +{{end}} + +{{$notDLT := not (eq .include_dlt "yes")}} +{{$notNotebook := not (eq .include_notebook "yes")}} +{{$notPython := not (eq .include_python "yes")}} + +{{if $notPython}} + {{skip "{{.project_name}}/src/{{.project_name}}"}} + {{skip "{{.project_name}}/tests/test_main.py"}} + {{skip "{{.project_name}}/setup.py"}} + {{skip "{{.project_name}}/pytest.ini"}} +{{end}} + +{{if $notDLT}} + {{skip "{{.project_name}}/src/dlt_pipeline.ipynb"}} + {{skip "{{.project_name}}/resources/{{.project_name}}_pipeline.yml"}} +{{end}} + +{{if $notNotebook}} + {{skip "{{.project_name}}/src/notebook.iypnb"}} +{{end}} + +{{if (and $notDLT $notNotebook $notPython)}} + {{skip "{{.project_name}}/resources/{{.project_name}}_job.yml"}} +{{else}} + {{skip "{{.project_name}}/resources/.gitkeep"}} +{{end}} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl index 4c89435b4..7c8876e75 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl @@ -28,10 +28,17 @@ The '{{.project_name}}' project was generated by using the default-python templa $ databricks bundle deploy --target prod ``` -5. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from - https://docs.databricks.com/dev-tools/vscode-ext.html. Or read the "getting started" documentation for - **Databricks Connect** for instructions on running the included Python code from a different IDE. +5. To run a job or pipeline, use the "run" comand: + ``` + $ databricks bundle run {{.project_name}}_job + ``` -6. For documentation on the Databricks asset bundles format used +6. Optionally, install developer tools such as the Databricks extension for Visual Studio Code from + https://docs.databricks.com/dev-tools/vscode-ext.html. 
+{{- if (eq .include_python "yes") }} Or read the "getting started" documentation for + **Databricks Connect** for instructions on running the included Python code from a different IDE. +{{- end}} + +7. For documentation on the Databricks asset bundles format used for this project, and for CI/CD configuration, see https://docs.databricks.com/dev-tools/bundles/index.html. diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep b/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep new file mode 100644 index 000000000..3e09c14c1 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/.gitkeep @@ -0,0 +1 @@ +This folder is reserved for Databricks Asset Bundles resource definitions. diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl index f8116cdfc..1792f9479 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_job.yml.tmpl @@ -1,6 +1,5 @@ # The main job for {{.project_name}} resources: - jobs: {{.project_name}}_job: name: {{.project_name}}_job @@ -10,20 +9,41 @@ resources: timezone_id: Europe/Amsterdam {{- if not is_service_principal}} + email_notifications: on_failure: - {{user_name}} + + {{else}} + {{end -}} tasks: + {{- if eq .include_notebook "yes" }} - task_key: notebook_task job_cluster_key: job_cluster notebook_task: notebook_path: ../src/notebook.ipynb - - - task_key: python_wheel_task + {{end -}} + {{- if (eq .include_dlt "yes") }} + - task_key: refresh_pipeline + {{- if (eq .include_notebook "yes" )}} depends_on: - task_key: notebook_task + {{- end}} + pipeline_task: + {{- /* TODO: we should find a way that 
doesn't use magics for the below, like ./{{project_name}}_pipeline.yml */}} + pipeline_id: ${resources.pipelines.{{.project_name}}_pipeline.id} + {{end -}} + {{- if (eq .include_python "yes") }} + - task_key: main_task + {{- if (eq .include_dlt "yes") }} + depends_on: + - task_key: refresh_pipeline + {{- else if (eq .include_notebook "yes" )}} + depends_on: + - task_key: notebook_task + {{end}} job_cluster_key: job_cluster python_wheel_task: package_name: {{.project_name}} @@ -31,6 +51,8 @@ resources: libraries: - whl: ../dist/*.whl + {{else}} + {{end -}} job_clusters: - job_cluster_key: job_cluster new_cluster: diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl new file mode 100644 index 000000000..ffe400cb8 --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl @@ -0,0 +1,12 @@ +# The main pipeline for {{.project_name}} +resources: + pipelines: + {{.project_name}}_pipeline: + name: "{{.project_name}}_pipeline" + target: "{{.project_name}}_${bundle.environment}" + libraries: + - notebook: + path: ../src/dlt_pipeline.ipynb + + configuration: + "bundle.sourcePath": "/Workspace/${workspace.file_path}/src" diff --git a/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl similarity index 84% rename from libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb rename to libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl index 2ee36c3c1..04bb261cd 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb +++ 
b/libs/template/templates/default-python/template/{{.project_name}}/scratch/exploration.ipynb.tmpl @@ -17,11 +17,15 @@ }, "outputs": [], "source": [ + {{- if (eq .include_python "yes") }} "import sys\n", "sys.path.append('../src')\n", - "from project import main\n", + "from {{.project_name}} import main\n", "\n", - "main.taxis.show(10)" + "main.get_taxis().show(10)" + {{else}} + "spark.range(10)" + {{end -}} ] } ], diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl new file mode 100644 index 000000000..74893238e --- /dev/null +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl @@ -0,0 +1,112 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "9a626959-61c8-4bba-84d2-2a4ecab1f7ec", + "showTitle": false, + "title": "" + } + }, + "source": [ + "# DLT pipeline\n", + "\n", + "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.my_project}}_pipeline.yml." + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "9198e987-5606-403d-9f6d-8f14e6a4017f", + "showTitle": false, + "title": "" + }, + "jupyter": { + {{- /* Collapse this cell by default. Just boring imports here! 
*/}} + "source_hidden": true + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "# Import DLT and make sure 'my_project' is on the Python path\n", + "import dlt\n", + "from pyspark.sql.functions import expr\n", + "from pyspark.sql import SparkSession\n", + "spark = SparkSession.builder.getOrCreate()\n", + "import sys\n", + "try:\n", + " sys.path.append(spark.conf.get(\"bundle.sourcePath\"))\n", + "except:\n", + " pass\n", + "from my_project import main" + {{else}} + "# Import DLT\n", + "import dlt\n", + "from pyspark.sql.functions import expr\n", + "from pyspark.sql import SparkSession\n", + "spark = SparkSession.builder.getOrCreate()" + {{end -}} + ] + }, + { + "cell_type": "code", + "execution_count": 0, + "metadata": { + "application/vnd.databricks.v1+cell": { + "cellMetadata": {}, + "inputWidgets": {}, + "nuid": "3fc19dba-61fd-4a89-8f8c-24fee63bfb14", + "showTitle": false, + "title": "" + } + }, + "outputs": [], + "source": [ + {{- if (eq .include_python "yes") }} + "@dlt.view\n", + "def taxi_raw():\n", + " return main.get_taxis()\n", + {{else}} + "\n", + "@dlt.view\n", + "def taxi_raw():\n", + " return spark.read.format(\"json\").load(\"/databricks-datasets/nyctaxi/sample/json/\")\n", + {{end -}} + "\n", + "@dlt.table\n", + "def filtered_taxis():\n", + " return dlt.read(\"taxi_raw\").filter(expr(\"fare_amount < 30\"))" + ] + } + ], + "metadata": { + "application/vnd.databricks.v1+notebook": { + "dashboards": [], + "language": "python", + "notebookMetadata": { + "pythonIndentUnit": 2 + }, + "notebookName": "dlt_pipeline", + "widgets": {} + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "name": "python", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl 
b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl index 26c743032..8423ecf8b 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl @@ -34,9 +34,13 @@ }, "outputs": [], "source": [ + {{- if (eq .include_python "yes") }} "from {{.project_name}} import main\n", "\n", - "main.get_taxis().show(10)\n" + "main.get_taxis().show(10)" + {{else}} + "spark.range(10)" + {{end -}} ] } ], diff --git a/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl index 92afccc6c..f1750046a 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/tests/main_test.py.tmpl @@ -2,4 +2,4 @@ from {{.project_name}} import main def test_main(): taxis = main.get_taxis() - assert taxis.count() == 5 + assert taxis.count() > 5 From c8f5990f47edf6429a88494e871084e49508835c Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Wed, 6 Sep 2023 13:46:21 +0200 Subject: [PATCH 106/139] Release v0.204.0 (#738) This release includes permission related commands for a subset of workspace services where they apply. These complement the `permissions` command and do not require specification of the object type to work with, as that is implied by the command they are nested under. CLI: * Group permission related commands ([#730](https://github.com/databricks/cli/pull/730)). Bundles: * Fixed artifact file uploading on Windows and wheel execution on DBR 13.3 ([#722](https://github.com/databricks/cli/pull/722)). * Make resource and artifact paths in bundle config relative to config folder ([#708](https://github.com/databricks/cli/pull/708)). 
* Add support for ordering of input prompts ([#662](https://github.com/databricks/cli/pull/662)). * Fix IsServicePrincipal() only working for workspace admins ([#732](https://github.com/databricks/cli/pull/732)). * databricks bundle init template v1 ([#686](https://github.com/databricks/cli/pull/686)). * databricks bundle init template v2: optional stubs, DLT support ([#700](https://github.com/databricks/cli/pull/700)). * Show 'databricks bundle init' template in CLI prompt ([#725](https://github.com/databricks/cli/pull/725)). * Include $PATH in set of environment variables to pass along. ([#736](https://github.com/databricks/cli/pull/736)). Internal: * Update Go SDK to v0.19.0 ([#729](https://github.com/databricks/cli/pull/729)). * Replace API call to test configuration with dummy authenticate call ([#728](https://github.com/databricks/cli/pull/728)). API Changes: * Changed `databricks account storage-credentials create` command to return . * Changed `databricks account storage-credentials get` command to return . * Changed `databricks account storage-credentials list` command to return . * Changed `databricks account storage-credentials update` command to return . * Changed `databricks connections create` command with new required argument order. * Changed `databricks connections update` command with new required argument order. * Changed `databricks volumes create` command with new required argument order. * Added `databricks artifact-allowlists` command group. * Added `databricks model-versions` command group. * Added `databricks registered-models` command group. * Added `databricks cluster-policies get-permission-levels` command. * Added `databricks cluster-policies get-permissions` command. * Added `databricks cluster-policies set-permissions` command. * Added `databricks cluster-policies update-permissions` command. * Added `databricks clusters get-permission-levels` command. * Added `databricks clusters get-permissions` command. 
* Added `databricks clusters set-permissions` command. * Added `databricks clusters update-permissions` command. * Added `databricks instance-pools get-permission-levels` command. * Added `databricks instance-pools get-permissions` command. * Added `databricks instance-pools set-permissions` command. * Added `databricks instance-pools update-permissions` command. * Added `databricks files` command group. * Changed `databricks permissions set` command to start returning . * Changed `databricks permissions update` command to start returning . * Added `databricks users get-permission-levels` command. * Added `databricks users get-permissions` command. * Added `databricks users set-permissions` command. * Added `databricks users update-permissions` command. * Added `databricks jobs get-permission-levels` command. * Added `databricks jobs get-permissions` command. * Added `databricks jobs set-permissions` command. * Added `databricks jobs update-permissions` command. * Changed `databricks experiments get-by-name` command to return . * Changed `databricks experiments get-experiment` command to return . * Added `databricks experiments delete-runs` command. * Added `databricks experiments get-permission-levels` command. * Added `databricks experiments get-permissions` command. * Added `databricks experiments restore-runs` command. * Added `databricks experiments set-permissions` command. * Added `databricks experiments update-permissions` command. * Added `databricks model-registry get-permission-levels` command. * Added `databricks model-registry get-permissions` command. * Added `databricks model-registry set-permissions` command. * Added `databricks model-registry update-permissions` command. * Added `databricks pipelines get-permission-levels` command. * Added `databricks pipelines get-permissions` command. * Added `databricks pipelines set-permissions` command. * Added `databricks pipelines update-permissions` command. 
* Added `databricks serving-endpoints get-permission-levels` command. * Added `databricks serving-endpoints get-permissions` command. * Added `databricks serving-endpoints set-permissions` command. * Added `databricks serving-endpoints update-permissions` command. * Added `databricks token-management get-permission-levels` command. * Added `databricks token-management get-permissions` command. * Added `databricks token-management set-permissions` command. * Added `databricks token-management update-permissions` command. * Changed `databricks dashboards create` command with new required argument order. * Added `databricks warehouses get-permission-levels` command. * Added `databricks warehouses get-permissions` command. * Added `databricks warehouses set-permissions` command. * Added `databricks warehouses update-permissions` command. * Added `databricks dashboard-widgets` command group. * Added `databricks query-visualizations` command group. * Added `databricks repos get-permission-levels` command. * Added `databricks repos get-permissions` command. * Added `databricks repos set-permissions` command. * Added `databricks repos update-permissions` command. * Added `databricks secrets get-secret` command. * Added `databricks workspace get-permission-levels` command. * Added `databricks workspace get-permissions` command. * Added `databricks workspace set-permissions` command. * Added `databricks workspace update-permissions` command. OpenAPI commit 09a7fa63d9ae243e5407941f200960ca14d48b07 (2023-09-04) --- CHANGELOG.md | 101 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6fcbab8ce..9835b0bce 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,106 @@ # Version changelog +## 0.204.0 + +This release includes permission related commands for a subset of workspace +services where they apply. 
These complement the `permissions` command and +do not require specification of the object type to work with, as that is +implied by the command they are nested under. + +CLI: + * Group permission related commands ([#730](https://github.com/databricks/cli/pull/730)). + +Bundles: + * Fixed artifact file uploading on Windows and wheel execution on DBR 13.3 ([#722](https://github.com/databricks/cli/pull/722)). + * Make resource and artifact paths in bundle config relative to config folder ([#708](https://github.com/databricks/cli/pull/708)). + * Add support for ordering of input prompts ([#662](https://github.com/databricks/cli/pull/662)). + * Fix IsServicePrincipal() only working for workspace admins ([#732](https://github.com/databricks/cli/pull/732)). + * databricks bundle init template v1 ([#686](https://github.com/databricks/cli/pull/686)). + * databricks bundle init template v2: optional stubs, DLT support ([#700](https://github.com/databricks/cli/pull/700)). + * Show 'databricks bundle init' template in CLI prompt ([#725](https://github.com/databricks/cli/pull/725)). + * Include in set of environment variables to pass along. ([#736](https://github.com/databricks/cli/pull/736)). + +Internal: + * Update Go SDK to v0.19.0 ([#729](https://github.com/databricks/cli/pull/729)). + * Replace API call to test configuration with dummy authenticate call ([#728](https://github.com/databricks/cli/pull/728)). + +API Changes: + * Changed `databricks account storage-credentials create` command to return . + * Changed `databricks account storage-credentials get` command to return . + * Changed `databricks account storage-credentials list` command to return . + * Changed `databricks account storage-credentials update` command to return . + * Changed `databricks connections create` command with new required argument order. + * Changed `databricks connections update` command with new required argument order. 
+ * Changed `databricks volumes create` command with new required argument order. + * Added `databricks artifact-allowlists` command group. + * Added `databricks model-versions` command group. + * Added `databricks registered-models` command group. + * Added `databricks cluster-policies get-permission-levels` command. + * Added `databricks cluster-policies get-permissions` command. + * Added `databricks cluster-policies set-permissions` command. + * Added `databricks cluster-policies update-permissions` command. + * Added `databricks clusters get-permission-levels` command. + * Added `databricks clusters get-permissions` command. + * Added `databricks clusters set-permissions` command. + * Added `databricks clusters update-permissions` command. + * Added `databricks instance-pools get-permission-levels` command. + * Added `databricks instance-pools get-permissions` command. + * Added `databricks instance-pools set-permissions` command. + * Added `databricks instance-pools update-permissions` command. + * Added `databricks files` command group. + * Changed `databricks permissions set` command to start returning . + * Changed `databricks permissions update` command to start returning . + * Added `databricks users get-permission-levels` command. + * Added `databricks users get-permissions` command. + * Added `databricks users set-permissions` command. + * Added `databricks users update-permissions` command. + * Added `databricks jobs get-permission-levels` command. + * Added `databricks jobs get-permissions` command. + * Added `databricks jobs set-permissions` command. + * Added `databricks jobs update-permissions` command. + * Changed `databricks experiments get-by-name` command to return . + * Changed `databricks experiments get-experiment` command to return . + * Added `databricks experiments delete-runs` command. + * Added `databricks experiments get-permission-levels` command. + * Added `databricks experiments get-permissions` command. 
+ * Added `databricks experiments restore-runs` command. + * Added `databricks experiments set-permissions` command. + * Added `databricks experiments update-permissions` command. + * Added `databricks model-registry get-permission-levels` command. + * Added `databricks model-registry get-permissions` command. + * Added `databricks model-registry set-permissions` command. + * Added `databricks model-registry update-permissions` command. + * Added `databricks pipelines get-permission-levels` command. + * Added `databricks pipelines get-permissions` command. + * Added `databricks pipelines set-permissions` command. + * Added `databricks pipelines update-permissions` command. + * Added `databricks serving-endpoints get-permission-levels` command. + * Added `databricks serving-endpoints get-permissions` command. + * Added `databricks serving-endpoints set-permissions` command. + * Added `databricks serving-endpoints update-permissions` command. + * Added `databricks token-management get-permission-levels` command. + * Added `databricks token-management get-permissions` command. + * Added `databricks token-management set-permissions` command. + * Added `databricks token-management update-permissions` command. + * Changed `databricks dashboards create` command with new required argument order. + * Added `databricks warehouses get-permission-levels` command. + * Added `databricks warehouses get-permissions` command. + * Added `databricks warehouses set-permissions` command. + * Added `databricks warehouses update-permissions` command. + * Added `databricks dashboard-widgets` command group. + * Added `databricks query-visualizations` command group. + * Added `databricks repos get-permission-levels` command. + * Added `databricks repos get-permissions` command. + * Added `databricks repos set-permissions` command. + * Added `databricks repos update-permissions` command. + * Added `databricks secrets get-secret` command. 
+ * Added `databricks workspace get-permission-levels` command. + * Added `databricks workspace get-permissions` command. + * Added `databricks workspace set-permissions` command. + * Added `databricks workspace update-permissions` command. + +OpenAPI commit 09a7fa63d9ae243e5407941f200960ca14d48b07 (2023-09-04) + ## 0.203.3 Bundles: From 3c79181148957592b53412f97f63fbbfd6aa00f5 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Wed, 6 Sep 2023 20:18:15 +0200 Subject: [PATCH 107/139] Remove unused file (#742) defaults.json was originally used in tests. It's no longer used and should be removed. --- libs/template/templates/default-python/defaults.json | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 libs/template/templates/default-python/defaults.json diff --git a/libs/template/templates/default-python/defaults.json b/libs/template/templates/default-python/defaults.json deleted file mode 100644 index 510ec4a3d..000000000 --- a/libs/template/templates/default-python/defaults.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "project_name": "my_project", - "include_notebook": "yes", - "include_dlt": "yes", - "include_python": "yes" -} From 50b2c0b83bde8bd645c1165ec6b70acf06284151 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Thu, 7 Sep 2023 10:26:43 +0200 Subject: [PATCH 108/139] Fix notebook showing up in template when not selected (#743) ## Changes This fixes a typo that caused the notebook.ipynb file to show up even if the user answered "no" to the question about including a notebook. ## Tests We have matrix validation tests for all the yes/no combinations and whether the build + validate. There is no current test for the absence of files. 
--- libs/template/templates/default-python/template/__preamble.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/template/templates/default-python/template/__preamble.tmpl b/libs/template/templates/default-python/template/__preamble.tmpl index c018f2825..95c613332 100644 --- a/libs/template/templates/default-python/template/__preamble.tmpl +++ b/libs/template/templates/default-python/template/__preamble.tmpl @@ -28,7 +28,7 @@ This file only template directives; it is skipped for the actual output. {{end}} {{if $notNotebook}} - {{skip "{{.project_name}}/src/notebook.iypnb"}} + {{skip "{{.project_name}}/src/notebook.ipynb"}} {{end}} {{if (and $notDLT $notNotebook $notPython)}} From c0ebfb8101700d6e6300954c2efcf2077c690e01 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Thu, 7 Sep 2023 14:48:59 +0200 Subject: [PATCH 109/139] Fix conversion of job parameters (#744) ## Changes Another example of singular/plural conversion. Longer term solution is we do a full sweep of the type using reflection to make sure we cover all fields. ## Tests Unit test passes. 
--- bundle/deploy/terraform/convert.go | 6 ++++++ bundle/deploy/terraform/convert_test.go | 13 +++++++++++++ 2 files changed, 19 insertions(+) diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 41bde91d8..cd480c898 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -90,6 +90,12 @@ func BundleToTerraform(config *config.Root) (*schema.Root, bool) { Tag: git.GitTag, } } + + for _, v := range src.Parameters { + var t schema.ResourceJobParameter + conv(v, &t) + dst.Parameter = append(dst.Parameter, t) + } } tfroot.Resource.Job[k] = &dst diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 4d912fbe0..34a65d70d 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -29,6 +29,16 @@ func TestConvertJob(t *testing.T) { GitProvider: jobs.GitProviderGitHub, GitUrl: "https://github.com/foo/bar", }, + Parameters: []jobs.JobParameterDefinition{ + { + Name: "param1", + Default: "default1", + }, + { + Name: "param2", + Default: "default2", + }, + }, }, } @@ -44,6 +54,9 @@ func TestConvertJob(t *testing.T) { assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) assert.Len(t, out.Resource.Job["my_job"].JobCluster, 1) assert.Equal(t, "https://github.com/foo/bar", out.Resource.Job["my_job"].GitSource.Url) + assert.Len(t, out.Resource.Job["my_job"].Parameter, 2) + assert.Equal(t, "param1", out.Resource.Job["my_job"].Parameter[0].Name) + assert.Equal(t, "param2", out.Resource.Job["my_job"].Parameter[1].Name) assert.Nil(t, out.Data) } From 10e08367495e0400b16ba68da5bb218e626ebcfb Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 7 Sep 2023 16:08:16 +0200 Subject: [PATCH 110/139] Added end-to-end test for deploying and running Python wheel task (#741) ## Changes Added end-to-end test for deploying and running Python wheel task ## Tests Test successfully passed on all environments, takes about 9-10 minutes 
to pass. ``` Deleted snapshot file at /var/folders/nt/xjv68qzs45319w4k36dhpylc0000gp/T/TestAccPythonWheelTaskDeployAndRun1845899209/002/.databricks/bundle/default/sync-snapshots/1f7cc766ffe038d6.json Successfully deleted files! 2023/09/06 17:50:50 INFO Releasing deployment lock mutator=destroy mutator=seq mutator=seq mutator=deferred mutator=lock:release --- PASS: TestAccPythonWheelTaskDeployAndRun (508.16s) PASS coverage: 77.9% of statements in ./... ok github.com/databricks/cli/internal/bundle 508.810s coverage: 77.9% of statements in ./... ``` --------- Co-authored-by: Pieter Noordhuis --- bundle/deploy/terraform/init.go | 2 + .../databricks_template_schema.json | 17 +++++ .../template/databricks.yml.tmpl | 21 ++++++ .../python_wheel_task/template/setup.py.tmpl | 15 ++++ .../template/{{.project_name}}/__init__.py | 2 + .../template/{{.project_name}}/__main__.py | 16 +++++ internal/bundle/helpers.go | 70 +++++++++++++++++++ internal/bundle/python_wheel_test.go | 41 +++++++++++ internal/helpers.go | 13 +++- 9 files changed, 196 insertions(+), 1 deletion(-) create mode 100644 internal/bundle/bundles/python_wheel_task/databricks_template_schema.json create mode 100644 internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl create mode 100644 internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl create mode 100644 internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py create mode 100644 internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py create mode 100644 internal/bundle/helpers.go create mode 100644 internal/bundle/python_wheel_test.go diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 878c4e8b2..60f0a6c4f 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -8,6 +8,7 @@ import ( "path/filepath" "runtime" "strings" + "time" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" @@ -59,6 +60,7 @@ 
func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con Product: product.Terraform, Version: version.Must(version.NewVersion("1.5.5")), InstallDir: binDir, + Timeout: 1 * time.Minute, } execPath, err = installer.Install(ctx) if err != nil { diff --git a/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json new file mode 100644 index 000000000..b39a628c1 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json @@ -0,0 +1,17 @@ +{ + "properties": { + "project_name": { + "type": "string", + "default": "my_test_code", + "description": "Unique name for this project" + }, + "spark_version": { + "type": "string", + "description": "Spark version used for job cluster" + }, + "node_type_id": { + "type": "string", + "description": "Node type id for job cluster" + } + } +} diff --git a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl new file mode 100644 index 000000000..4386879a4 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl @@ -0,0 +1,21 @@ +bundle: + name: wheel-task + +resources: + jobs: + some_other_job: + name: "[${bundle.target}] Test Wheel Job" + tasks: + - task_key: TestTask + new_cluster: + num_workers: 1 + spark_version: "{{.spark_version}}" + node_type_id: "{{.node_type_id}}" + python_wheel_task: + package_name: my_test_code + entry_point: run + parameters: + - "one" + - "two" + libraries: + - whl: ./dist/*.whl diff --git a/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl b/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl new file mode 100644 index 000000000..b528657b1 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/setup.py.tmpl @@ -0,0 +1,15 @@ +from setuptools import setup, find_packages + +import {{.project_name}} 
+ +setup( + name="{{.project_name}}", + version={{.project_name}}.__version__, + author={{.project_name}}.__author__, + url="https://databricks.com", + author_email="john.doe@databricks.com", + description="my example wheel", + packages=find_packages(include=["{{.project_name}}"]), + entry_points={"group1": "run={{.project_name}}.__main__:main"}, + install_requires=["setuptools"], +) diff --git a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py new file mode 100644 index 000000000..909f1f322 --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__init__.py @@ -0,0 +1,2 @@ +__version__ = "0.0.1" +__author__ = "Databricks" diff --git a/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py new file mode 100644 index 000000000..ea918ce2d --- /dev/null +++ b/internal/bundle/bundles/python_wheel_task/template/{{.project_name}}/__main__.py @@ -0,0 +1,16 @@ +""" +The entry point of the Python Wheel +""" + +import sys + + +def main(): + # This method will print the provided arguments + print("Hello from my func") + print("Got arguments:") + print(sys.argv) + + +if __name__ == "__main__": + main() diff --git a/internal/bundle/helpers.go b/internal/bundle/helpers.go new file mode 100644 index 000000000..3fd4eabc9 --- /dev/null +++ b/internal/bundle/helpers.go @@ -0,0 +1,70 @@ +package bundle + +import ( + "context" + "encoding/json" + "os" + "path/filepath" + "strings" + "testing" + + "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/internal" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/flags" + "github.com/databricks/cli/libs/template" +) + +func initTestTemplate(t *testing.T, templateName string, config map[string]any) (string, error) { + templateRoot := 
filepath.Join("bundles", templateName) + + bundleRoot := t.TempDir() + configFilePath, err := writeConfigFile(t, config) + if err != nil { + return "", err + } + + ctx := root.SetWorkspaceClient(context.Background(), nil) + cmd := cmdio.NewIO(flags.OutputJSON, strings.NewReader(""), os.Stdout, os.Stderr, "bundles") + ctx = cmdio.InContext(ctx, cmd) + + err = template.Materialize(ctx, configFilePath, templateRoot, bundleRoot) + return bundleRoot, err +} + +func writeConfigFile(t *testing.T, config map[string]any) (string, error) { + bytes, err := json.Marshal(config) + if err != nil { + return "", err + } + + dir := t.TempDir() + filepath := filepath.Join(dir, "config.json") + t.Log("Configuration for template: ", string(bytes)) + + err = os.WriteFile(filepath, bytes, 0644) + return filepath, err +} + +func deployBundle(t *testing.T, path string) error { + t.Setenv("BUNDLE_ROOT", path) + c := internal.NewCobraTestRunner(t, "bundle", "deploy", "--force-lock") + _, _, err := c.Run() + return err +} + +func runResource(t *testing.T, path string, key string) (string, error) { + ctx := context.Background() + ctx = cmdio.NewContext(ctx, cmdio.Default()) + + c := internal.NewCobraTestRunnerWithContext(t, ctx, "bundle", "run", key) + stdout, _, err := c.Run() + return stdout.String(), err +} + +func destroyBundle(t *testing.T, path string) error { + t.Setenv("BUNDLE_ROOT", path) + c := internal.NewCobraTestRunner(t, "bundle", "destroy", "--auto-approve") + _, _, err := c.Run() + return err +} diff --git a/internal/bundle/python_wheel_test.go b/internal/bundle/python_wheel_test.go new file mode 100644 index 000000000..52683edcc --- /dev/null +++ b/internal/bundle/python_wheel_test.go @@ -0,0 +1,41 @@ +package bundle + +import ( + "testing" + + "github.com/databricks/cli/internal" + "github.com/stretchr/testify/require" +) + +func TestAccPythonWheelTaskDeployAndRun(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + var nodeTypeId string + if 
env == "gcp" { + nodeTypeId = "n1-standard-4" + } else if env == "aws" { + nodeTypeId = "i3.xlarge" + } else { + nodeTypeId = "Standard_DS4_v2" + } + + bundleRoot, err := initTestTemplate(t, "python_wheel_task", map[string]any{ + "node_type_id": nodeTypeId, + "spark_version": "13.2.x-snapshot-scala2.12", + }) + require.NoError(t, err) + + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + t.Cleanup(func() { + destroyBundle(t, bundleRoot) + }) + + out, err := runResource(t, bundleRoot, "some_other_job") + require.NoError(t, err) + require.Contains(t, out, "Hello from my func") + require.Contains(t, out, "Got arguments:") + require.Contains(t, out, "['python', 'one', 'two']") +} diff --git a/internal/helpers.go b/internal/helpers.go index ddc005173..bf27fbb55 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -58,6 +58,8 @@ type cobraTestRunner struct { stdout bytes.Buffer stderr bytes.Buffer + ctx context.Context + // Line-by-line output. // Background goroutines populate these channels by reading from stdout/stderr pipes. stdoutLines <-chan string @@ -128,7 +130,7 @@ func (t *cobraTestRunner) RunBackground() { t.registerFlagCleanup(root) errch := make(chan error) - ctx, cancel := context.WithCancel(context.Background()) + ctx, cancel := context.WithCancel(t.ctx) // Tee stdout/stderr to buffers. 
stdoutR = io.TeeReader(stdoutR, &t.stdout) @@ -234,6 +236,15 @@ func (c *cobraTestRunner) Eventually(condition func() bool, waitFor time.Duratio func NewCobraTestRunner(t *testing.T, args ...string) *cobraTestRunner { return &cobraTestRunner{ T: t, + ctx: context.Background(), + args: args, + } +} + +func NewCobraTestRunnerWithContext(t *testing.T, ctx context.Context, args ...string) *cobraTestRunner { + return &cobraTestRunner{ + T: t, + ctx: ctx, args: args, } } From 1a7bf4e4f127b9ef6dc3aa6ab88f8dd437174d9f Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 7 Sep 2023 16:36:06 +0200 Subject: [PATCH 111/139] Add schema and config validation to jsonschema package (#740) ## Changes At a high level this PR adds new schema validation and moves functionality that should be present in the jsonschema package, but resides in the template package today, to the jsonschema package. This includes for example schema validation, schema instance validation, to / from string conversion methods etc. The list below outlines all the pieces that have been moved over, and the new validation bits added. This PR: 1. Adds casting default value of schema properties to integers to the jsonschema.Load method. 2. Adds validation for default value types for schema properties, checking they are consistant with the type defined. 3. Introduces the LoadInstance and ValidateInstance methods to the json schema package. These methods can be used to read and validate JSON documents against the schema. 4. Replaces validation done for template inputs to use the newly defined JSON schema validation functions. 5. Moves to/from string and isInteger utility methods to the json schema package. ## Tests Existing and new unit tests. 
--- libs/jsonschema/instance.go | 91 ++++++++++++ libs/jsonschema/instance_test.go | 129 +++++++++++++++++ libs/jsonschema/schema.go | 32 +++++ libs/jsonschema/schema_test.go | 39 ++++- .../instance-load/invalid-type-instance.json | 6 + .../instance-load/valid-instance.json | 6 + .../test-schema-no-additional-properties.json | 19 +++ .../test-schema-some-fields-required.json | 19 +++ .../instance-validate/test-schema.json | 18 +++ .../schema-invalid-default.json | 9 ++ .../schema-load-int/schema-valid.json | 9 ++ libs/{template => jsonschema}/utils.go | 28 ++-- libs/{template => jsonschema}/utils_test.go | 41 +++--- .../validate_type.go} | 20 ++- .../validate_type_test.go} | 41 +++--- libs/template/config.go | 95 ++----------- libs/template/config_test.go | 134 ++++++------------ .../config-test-schema/test-schema.json | 18 +++ 18 files changed, 512 insertions(+), 242 deletions(-) create mode 100644 libs/jsonschema/instance.go create mode 100644 libs/jsonschema/instance_test.go create mode 100644 libs/jsonschema/testdata/instance-load/invalid-type-instance.json create mode 100644 libs/jsonschema/testdata/instance-load/valid-instance.json create mode 100644 libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json create mode 100644 libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json create mode 100644 libs/jsonschema/testdata/instance-validate/test-schema.json create mode 100644 libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json create mode 100644 libs/jsonschema/testdata/schema-load-int/schema-valid.json rename libs/{template => jsonschema}/utils.go (80%) rename libs/{template => jsonschema}/utils_test.go (72%) rename libs/{template/validators.go => jsonschema/validate_type.go} (68%) rename libs/{template/validators_test.go => jsonschema/validate_type_test.go} (75%) create mode 100644 libs/template/testdata/config-test-schema/test-schema.json diff --git a/libs/jsonschema/instance.go 
b/libs/jsonschema/instance.go new file mode 100644 index 000000000..02ab9f281 --- /dev/null +++ b/libs/jsonschema/instance.go @@ -0,0 +1,91 @@ +package jsonschema + +import ( + "encoding/json" + "fmt" + "os" +) + +// Load a JSON document and validate it against the JSON schema. Instance here +// refers to a JSON document. see: https://json-schema.org/draft/2020-12/json-schema-core.html#name-instance +func (s *Schema) LoadInstance(path string) (map[string]any, error) { + instance := make(map[string]any) + b, err := os.ReadFile(path) + if err != nil { + return nil, err + } + err = json.Unmarshal(b, &instance) + if err != nil { + return nil, err + } + + // The default JSON unmarshaler parses untyped number values as float64. + // We convert integer properties from float64 to int64 here. + for name, v := range instance { + propertySchema, ok := s.Properties[name] + if !ok { + continue + } + if propertySchema.Type != IntegerType { + continue + } + integerValue, err := toInteger(v) + if err != nil { + return nil, fmt.Errorf("failed to parse property %s: %w", name, err) + } + instance[name] = integerValue + } + return instance, s.ValidateInstance(instance) +} + +func (s *Schema) ValidateInstance(instance map[string]any) error { + if err := s.validateAdditionalProperties(instance); err != nil { + return err + } + if err := s.validateRequired(instance); err != nil { + return err + } + return s.validateTypes(instance) +} + +// If additional properties is set to false, this function validates instance only +// contains properties defined in the schema. +func (s *Schema) validateAdditionalProperties(instance map[string]any) error { + // Note: AdditionalProperties has the type any. 
+ if s.AdditionalProperties != false { + return nil + } + for k := range instance { + _, ok := s.Properties[k] + if !ok { + return fmt.Errorf("property %s is not defined in the schema", k) + } + } + return nil +} + +// This function validates that all require properties in the schema have values +// in the instance. +func (s *Schema) validateRequired(instance map[string]any) error { + for _, name := range s.Required { + if _, ok := instance[name]; !ok { + return fmt.Errorf("no value provided for required property %s", name) + } + } + return nil +} + +// Validates the types of all input properties values match their types defined in the schema +func (s *Schema) validateTypes(instance map[string]any) error { + for k, v := range instance { + fieldInfo, ok := s.Properties[k] + if !ok { + continue + } + err := validateType(v, fieldInfo.Type) + if err != nil { + return fmt.Errorf("incorrect type for property %s: %w", k, err) + } + } + return nil +} diff --git a/libs/jsonschema/instance_test.go b/libs/jsonschema/instance_test.go new file mode 100644 index 000000000..d5e0766dd --- /dev/null +++ b/libs/jsonschema/instance_test.go @@ -0,0 +1,129 @@ +package jsonschema + +import ( + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestValidateInstanceAdditionalPropertiesPermitted(t *testing.T) { + instance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + "an_additional_property": "abc", + } + + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + err = schema.validateAdditionalProperties(instance) + assert.NoError(t, err) + + err = schema.ValidateInstance(instance) + assert.NoError(t, err) +} + +func TestValidateInstanceAdditionalPropertiesForbidden(t *testing.T) { + instance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + "an_additional_property": "abc", + } + + schema, err := 
Load("./testdata/instance-validate/test-schema-no-additional-properties.json") + require.NoError(t, err) + + err = schema.validateAdditionalProperties(instance) + assert.EqualError(t, err, "property an_additional_property is not defined in the schema") + + err = schema.ValidateInstance(instance) + assert.EqualError(t, err, "property an_additional_property is not defined in the schema") + + instanceWOAdditionalProperties := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateAdditionalProperties(instanceWOAdditionalProperties) + assert.NoError(t, err) + + err = schema.ValidateInstance(instanceWOAdditionalProperties) + assert.NoError(t, err) +} + +func TestValidateInstanceTypes(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateTypes(validInstance) + assert.NoError(t, err) + + err = schema.ValidateInstance(validInstance) + assert.NoError(t, err) + + invalidInstance := map[string]any{ + "int_val": "abc", + "float_val": 1.0, + "bool_val": false, + } + + err = schema.validateTypes(invalidInstance) + assert.EqualError(t, err, "incorrect type for property int_val: expected type integer, but value is \"abc\"") + + err = schema.ValidateInstance(invalidInstance) + assert.EqualError(t, err, "incorrect type for property int_val: expected type integer, but value is \"abc\"") +} + +func TestValidateInstanceRequired(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema-some-fields-required.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, + } + err = schema.validateRequired(validInstance) + assert.NoError(t, err) + err = schema.ValidateInstance(validInstance) + assert.NoError(t, err) + + invalidInstance := map[string]any{ + "string_val": "abc", + 
"float_val": 1.0, + "bool_val": false, + } + err = schema.validateRequired(invalidInstance) + assert.EqualError(t, err, "no value provided for required property int_val") + err = schema.ValidateInstance(invalidInstance) + assert.EqualError(t, err, "no value provided for required property int_val") +} + +func TestLoadInstance(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema.json") + require.NoError(t, err) + + // Expect the instance to be loaded successfully. + instance, err := schema.LoadInstance("./testdata/instance-load/valid-instance.json") + assert.NoError(t, err) + assert.Equal(t, map[string]any{ + "bool_val": false, + "int_val": int64(1), + "string_val": "abc", + "float_val": 2.0, + }, instance) + + // Expect instance validation against the schema to fail. + _, err = schema.LoadInstance("./testdata/instance-load/invalid-type-instance.json") + assert.EqualError(t, err, "incorrect type for property string_val: expected type string, but value is 123") +} diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index 87e9acd56..44c65ecc6 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -58,6 +58,7 @@ const ( ) func (schema *Schema) validate() error { + // Validate property types are all valid JSON schema types. for _, v := range schema.Properties { switch v.Type { case NumberType, BooleanType, StringType, IntegerType: @@ -72,6 +73,17 @@ func (schema *Schema) validate() error { return fmt.Errorf("type %s is not a recognized json schema type", v.Type) } } + + // Validate default property values are consistent with types. 
+ for name, property := range schema.Properties { + if property.Default == nil { + continue + } + if err := validateType(property.Default, property.Type); err != nil { + return fmt.Errorf("type validation for default value of property %s failed: %w", name, err) + } + } + return nil } @@ -85,5 +97,25 @@ func Load(path string) (*Schema, error) { if err != nil { return nil, err } + + // Convert the default values of top-level properties to integers. + // This is required because the default JSON unmarshaler parses numbers + // as floats when the Golang field it's being loaded to is untyped. + // + // NOTE: properties can be recursively defined in a schema, but the current + // use-cases only uses the first layer of properties so we skip converting + // any recursive properties. + for name, property := range schema.Properties { + if property.Type != IntegerType { + continue + } + if property.Default != nil { + property.Default, err = toInteger(property.Default) + if err != nil { + return nil, fmt.Errorf("failed to parse default value for property %s: %w", name, err) + } + } + } + return schema, schema.validate() } diff --git a/libs/jsonschema/schema_test.go b/libs/jsonschema/schema_test.go index 76112492f..5b92d8466 100644 --- a/libs/jsonschema/schema_test.go +++ b/libs/jsonschema/schema_test.go @@ -6,7 +6,7 @@ import ( "github.com/stretchr/testify/assert" ) -func TestJsonSchemaValidate(t *testing.T) { +func TestSchemaValidateTypeNames(t *testing.T) { var err error toSchema := func(s string) *Schema { return &Schema{ @@ -42,3 +42,40 @@ func TestJsonSchemaValidate(t *testing.T) { err = toSchema("foobar").validate() assert.EqualError(t, err, "type foobar is not a recognized json schema type") } + +func TestSchemaLoadIntegers(t *testing.T) { + schema, err := Load("./testdata/schema-load-int/schema-valid.json") + assert.NoError(t, err) + assert.Equal(t, int64(1), schema.Properties["abc"].Default) +} + +func TestSchemaLoadIntegersWithInvalidDefault(t *testing.T) { + _, err 
:= Load("./testdata/schema-load-int/schema-invalid-default.json") + assert.EqualError(t, err, "failed to parse default value for property abc: expected integer value, got: 1.1") +} + +func TestSchemaValidateDefaultType(t *testing.T) { + invalidSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "number", + Default: "abc", + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "type validation for default value of property foo failed: expected type float, but value is \"abc\"") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Default: true, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} diff --git a/libs/jsonschema/testdata/instance-load/invalid-type-instance.json b/libs/jsonschema/testdata/instance-load/invalid-type-instance.json new file mode 100644 index 000000000..c55b6fccb --- /dev/null +++ b/libs/jsonschema/testdata/instance-load/invalid-type-instance.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "bool_val": false, + "string_val": 123, + "float_val": 3.0 +} diff --git a/libs/jsonschema/testdata/instance-load/valid-instance.json b/libs/jsonschema/testdata/instance-load/valid-instance.json new file mode 100644 index 000000000..7d4dc818a --- /dev/null +++ b/libs/jsonschema/testdata/instance-load/valid-instance.json @@ -0,0 +1,6 @@ +{ + "int_val": 1, + "bool_val": false, + "string_val": "abc", + "float_val": 2.0 +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json b/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json new file mode 100644 index 000000000..98b19d5a4 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-no-additional-properties.json @@ -0,0 +1,19 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": 
"string", + "default": "abc" + } + }, + "additionalProperties": false +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json b/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json new file mode 100644 index 000000000..465811034 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-some-fields-required.json @@ -0,0 +1,19 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + }, + "required": ["int_val", "float_val", "bool_val"] +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema.json b/libs/jsonschema/testdata/instance-validate/test-schema.json new file mode 100644 index 000000000..41eb82519 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema.json @@ -0,0 +1,18 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json b/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json new file mode 100644 index 000000000..1e709f622 --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-invalid-default.json @@ -0,0 +1,9 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1.1 + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-valid.json b/libs/jsonschema/testdata/schema-load-int/schema-valid.json new file mode 100644 index 000000000..599ac04d0 --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-valid.json @@ -0,0 +1,9 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1 + } + } +} diff --git 
a/libs/template/utils.go b/libs/jsonschema/utils.go similarity index 80% rename from libs/template/utils.go rename to libs/jsonschema/utils.go index ade6a5730..21866965e 100644 --- a/libs/template/utils.go +++ b/libs/jsonschema/utils.go @@ -1,11 +1,9 @@ -package template +package jsonschema import ( "errors" "fmt" "strconv" - - "github.com/databricks/cli/libs/jsonschema" ) // function to check whether a float value represents an integer @@ -40,41 +38,41 @@ func toInteger(v any) (int64, error) { } } -func toString(v any, T jsonschema.Type) (string, error) { +func ToString(v any, T Type) (string, error) { switch T { - case jsonschema.BooleanType: + case BooleanType: boolVal, ok := v.(bool) if !ok { return "", fmt.Errorf("expected bool, got: %#v", v) } return strconv.FormatBool(boolVal), nil - case jsonschema.StringType: + case StringType: strVal, ok := v.(string) if !ok { return "", fmt.Errorf("expected string, got: %#v", v) } return strVal, nil - case jsonschema.NumberType: + case NumberType: floatVal, ok := v.(float64) if !ok { return "", fmt.Errorf("expected float, got: %#v", v) } return strconv.FormatFloat(floatVal, 'f', -1, 64), nil - case jsonschema.IntegerType: + case IntegerType: intVal, err := toInteger(v) if err != nil { return "", err } return strconv.FormatInt(intVal, 10), nil - case jsonschema.ArrayType, jsonschema.ObjectType: + case ArrayType, ObjectType: return "", fmt.Errorf("cannot format object of type %s as a string. 
Value of object: %#v", T, v) default: return "", fmt.Errorf("unknown json schema type: %q", T) } } -func fromString(s string, T jsonschema.Type) (any, error) { - if T == jsonschema.StringType { +func FromString(s string, T Type) (any, error) { + if T == StringType { return s, nil } @@ -83,13 +81,13 @@ func fromString(s string, T jsonschema.Type) (any, error) { var err error switch T { - case jsonschema.BooleanType: + case BooleanType: v, err = strconv.ParseBool(s) - case jsonschema.NumberType: + case NumberType: v, err = strconv.ParseFloat(s, 32) - case jsonschema.IntegerType: + case IntegerType: v, err = strconv.ParseInt(s, 10, 64) - case jsonschema.ArrayType, jsonschema.ObjectType: + case ArrayType, ObjectType: return "", fmt.Errorf("cannot parse string as object of type %s. Value of string: %q", T, s) default: return "", fmt.Errorf("unknown json schema type: %q", T) diff --git a/libs/template/utils_test.go b/libs/jsonschema/utils_test.go similarity index 72% rename from libs/template/utils_test.go rename to libs/jsonschema/utils_test.go index 1e038aac6..9686cf39b 100644 --- a/libs/template/utils_test.go +++ b/libs/jsonschema/utils_test.go @@ -1,10 +1,9 @@ -package template +package jsonschema import ( "math" "testing" - "github.com/databricks/cli/libs/jsonschema" "github.com/stretchr/testify/assert" ) @@ -50,72 +49,72 @@ func TestTemplateToInteger(t *testing.T) { } func TestTemplateToString(t *testing.T) { - s, err := toString(true, jsonschema.BooleanType) + s, err := ToString(true, BooleanType) assert.NoError(t, err) assert.Equal(t, "true", s) - s, err = toString("abc", jsonschema.StringType) + s, err = ToString("abc", StringType) assert.NoError(t, err) assert.Equal(t, "abc", s) - s, err = toString(1.1, jsonschema.NumberType) + s, err = ToString(1.1, NumberType) assert.NoError(t, err) assert.Equal(t, "1.1", s) - s, err = toString(2, jsonschema.IntegerType) + s, err = ToString(2, IntegerType) assert.NoError(t, err) assert.Equal(t, "2", s) - _, err = 
toString([]string{}, jsonschema.ArrayType) + _, err = ToString([]string{}, ArrayType) assert.EqualError(t, err, "cannot format object of type array as a string. Value of object: []string{}") - _, err = toString("true", jsonschema.BooleanType) + _, err = ToString("true", BooleanType) assert.EqualError(t, err, "expected bool, got: \"true\"") - _, err = toString(123, jsonschema.StringType) + _, err = ToString(123, StringType) assert.EqualError(t, err, "expected string, got: 123") - _, err = toString(false, jsonschema.NumberType) + _, err = ToString(false, NumberType) assert.EqualError(t, err, "expected float, got: false") - _, err = toString("abc", jsonschema.IntegerType) + _, err = ToString("abc", IntegerType) assert.EqualError(t, err, "cannot convert \"abc\" to an integer") - _, err = toString("abc", "foobar") + _, err = ToString("abc", "foobar") assert.EqualError(t, err, "unknown json schema type: \"foobar\"") } func TestTemplateFromString(t *testing.T) { - v, err := fromString("true", jsonschema.BooleanType) + v, err := FromString("true", BooleanType) assert.NoError(t, err) assert.Equal(t, true, v) - v, err = fromString("abc", jsonschema.StringType) + v, err = FromString("abc", StringType) assert.NoError(t, err) assert.Equal(t, "abc", v) - v, err = fromString("1.1", jsonschema.NumberType) + v, err = FromString("1.1", NumberType) assert.NoError(t, err) // Floating point conversions are not perfect assert.True(t, (v.(float64)-1.1) < 0.000001) - v, err = fromString("12345", jsonschema.IntegerType) + v, err = FromString("12345", IntegerType) assert.NoError(t, err) assert.Equal(t, int64(12345), v) - v, err = fromString("123", jsonschema.NumberType) + v, err = FromString("123", NumberType) assert.NoError(t, err) assert.Equal(t, float64(123), v) - _, err = fromString("qrt", jsonschema.ArrayType) + _, err = FromString("qrt", ArrayType) assert.EqualError(t, err, "cannot parse string as object of type array. 
Value of string: \"qrt\"") - _, err = fromString("abc", jsonschema.IntegerType) + _, err = FromString("abc", IntegerType) assert.EqualError(t, err, "could not parse \"abc\" as a integer: strconv.ParseInt: parsing \"abc\": invalid syntax") - _, err = fromString("1.0", jsonschema.IntegerType) + _, err = FromString("1.0", IntegerType) assert.EqualError(t, err, "could not parse \"1.0\" as a integer: strconv.ParseInt: parsing \"1.0\": invalid syntax") - _, err = fromString("1.0", "foobar") + _, err = FromString("1.0", "foobar") assert.EqualError(t, err, "unknown json schema type: \"foobar\"") } diff --git a/libs/template/validators.go b/libs/jsonschema/validate_type.go similarity index 68% rename from libs/template/validators.go rename to libs/jsonschema/validate_type.go index 209700b63..125d6b20b 100644 --- a/libs/template/validators.go +++ b/libs/jsonschema/validate_type.go @@ -1,17 +1,15 @@ -package template +package jsonschema import ( "fmt" "reflect" "slices" - - "github.com/databricks/cli/libs/jsonschema" ) -type validator func(v any) error +type validateTypeFunc func(v any) error -func validateType(v any, fieldType jsonschema.Type) error { - validateFunc, ok := validators[fieldType] +func validateType(v any, fieldType Type) error { + validateFunc, ok := validateTypeFuncs[fieldType] if !ok { return nil } @@ -50,9 +48,9 @@ func validateInteger(v any) error { return nil } -var validators map[jsonschema.Type]validator = map[jsonschema.Type]validator{ - jsonschema.StringType: validateString, - jsonschema.BooleanType: validateBoolean, - jsonschema.IntegerType: validateInteger, - jsonschema.NumberType: validateNumber, +var validateTypeFuncs map[Type]validateTypeFunc = map[Type]validateTypeFunc{ + StringType: validateString, + BooleanType: validateBoolean, + IntegerType: validateInteger, + NumberType: validateNumber, } diff --git a/libs/template/validators_test.go b/libs/jsonschema/validate_type_test.go similarity index 75% rename from libs/template/validators_test.go 
rename to libs/jsonschema/validate_type_test.go index f34f037a1..36d9e5758 100644 --- a/libs/template/validators_test.go +++ b/libs/jsonschema/validate_type_test.go @@ -1,9 +1,8 @@ -package template +package jsonschema import ( "testing" - "github.com/databricks/cli/libs/jsonschema" "github.com/stretchr/testify/assert" ) @@ -77,53 +76,53 @@ func TestValidatorInt(t *testing.T) { func TestTemplateValidateType(t *testing.T) { // assert validation passing - err := validateType(int(0), jsonschema.IntegerType) + err := validateType(int(0), IntegerType) assert.NoError(t, err) - err = validateType(int32(1), jsonschema.IntegerType) + err = validateType(int32(1), IntegerType) assert.NoError(t, err) - err = validateType(int64(1), jsonschema.IntegerType) + err = validateType(int64(1), IntegerType) assert.NoError(t, err) - err = validateType(float32(1.1), jsonschema.NumberType) + err = validateType(float32(1.1), NumberType) assert.NoError(t, err) - err = validateType(float64(1.2), jsonschema.NumberType) + err = validateType(float64(1.2), NumberType) assert.NoError(t, err) - err = validateType(false, jsonschema.BooleanType) + err = validateType(false, BooleanType) assert.NoError(t, err) - err = validateType("abc", jsonschema.StringType) + err = validateType("abc", StringType) assert.NoError(t, err) // assert validation failing for integers - err = validateType(float64(1.2), jsonschema.IntegerType) + err = validateType(float64(1.2), IntegerType) assert.ErrorContains(t, err, "expected type integer, but value is 1.2") - err = validateType(true, jsonschema.IntegerType) + err = validateType(true, IntegerType) assert.ErrorContains(t, err, "expected type integer, but value is true") - err = validateType("abc", jsonschema.IntegerType) + err = validateType("abc", IntegerType) assert.ErrorContains(t, err, "expected type integer, but value is \"abc\"") // assert validation failing for floats - err = validateType(true, jsonschema.NumberType) + err = validateType(true, NumberType) 
assert.ErrorContains(t, err, "expected type float, but value is true") - err = validateType("abc", jsonschema.NumberType) + err = validateType("abc", NumberType) assert.ErrorContains(t, err, "expected type float, but value is \"abc\"") - err = validateType(int(1), jsonschema.NumberType) + err = validateType(int(1), NumberType) assert.ErrorContains(t, err, "expected type float, but value is 1") // assert validation failing for boolean - err = validateType(int(1), jsonschema.BooleanType) + err = validateType(int(1), BooleanType) assert.ErrorContains(t, err, "expected type boolean, but value is 1") - err = validateType(float64(1), jsonschema.BooleanType) + err = validateType(float64(1), BooleanType) assert.ErrorContains(t, err, "expected type boolean, but value is 1") - err = validateType("abc", jsonschema.BooleanType) + err = validateType("abc", BooleanType) assert.ErrorContains(t, err, "expected type boolean, but value is \"abc\"") // assert validation failing for string - err = validateType(int(1), jsonschema.StringType) + err = validateType(int(1), StringType) assert.ErrorContains(t, err, "expected type string, but value is 1") - err = validateType(float64(1), jsonschema.StringType) + err = validateType(float64(1), StringType) assert.ErrorContains(t, err, "expected type string, but value is 1") - err = validateType(false, jsonschema.StringType) + err = validateType(false, StringType) assert.ErrorContains(t, err, "expected type string, but value is false") } diff --git a/libs/template/config.go b/libs/template/config.go index 8a1ed6c82..6f980f613 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -2,12 +2,11 @@ package template import ( "context" - "encoding/json" "fmt" - "os" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/jsonschema" + "golang.org/x/exp/maps" ) type config struct { @@ -26,6 +25,9 @@ func newConfig(ctx context.Context, schemaPath string) (*config, error) { return nil, err } + // Do not allow template input 
variables that are not defined in the schema. + schema.AdditionalProperties = false + // Return config return &config{ ctx: ctx, @@ -45,32 +47,10 @@ func validateSchema(schema *jsonschema.Schema) error { // Reads json file at path and assigns values from the file func (c *config) assignValuesFromFile(path string) error { - // Read the config file - configFromFile := make(map[string]any, 0) - b, err := os.ReadFile(path) + // Load the config file. + configFromFile, err := c.schema.LoadInstance(path) if err != nil { - return err - } - err = json.Unmarshal(b, &configFromFile) - if err != nil { - return err - } - - // Cast any integer properties, from float to integer. Required because - // the json unmarshaller treats all json numbers as floating point - for name, floatVal := range configFromFile { - property, ok := c.schema.Properties[name] - if !ok { - return fmt.Errorf("%s is not defined as an input parameter for the template", name) - } - if property.Type != jsonschema.IntegerType { - continue - } - v, err := toInteger(floatVal) - if err != nil { - return fmt.Errorf("failed to cast value %v of property %s from file %s to an integer: %w", floatVal, name, path, err) - } - configFromFile[name] = v + return fmt.Errorf("failed to load config from file %s: %w", path, err) } // Write configs from the file to the input map, not overwriting any existing @@ -91,26 +71,11 @@ func (c *config) assignDefaultValues() error { if _, ok := c.values[name]; ok { continue } - // No default value defined for the property if property.Default == nil { continue } - - // Assign default value if property is not an integer - if property.Type != jsonschema.IntegerType { - c.values[name] = property.Default - continue - } - - // Cast default value to int before assigning to an integer configuration. 
- // Required because untyped field Default will read all numbers as floats - // during unmarshalling - v, err := toInteger(property.Default) - if err != nil { - return fmt.Errorf("failed to cast default value %v of property %s to an integer: %w", property.Default, name, err) - } - c.values[name] = v + c.values[name] = property.Default } return nil } @@ -130,7 +95,7 @@ func (c *config) promptForValues() error { var defaultVal string var err error if property.Default != nil { - defaultVal, err = toString(property.Default, property.Type) + defaultVal, err = jsonschema.ToString(property.Default, property.Type) if err != nil { return err } @@ -143,7 +108,7 @@ func (c *config) promptForValues() error { } // Convert user input string back to a value - c.values[name], err = fromString(userInput, property.Type) + c.values[name], err = jsonschema.FromString(userInput, property.Type) if err != nil { return err } @@ -163,42 +128,10 @@ func (c *config) promptOrAssignDefaultValues() error { // Validates the configuration. If passes, the configuration is ready to be used // to initialize the template. 
func (c *config) validate() error { - validateFns := []func() error{ - c.validateValuesDefined, - c.validateValuesType, - } - - for _, fn := range validateFns { - err := fn() - if err != nil { - return err - } - } - return nil -} - -// Validates all input properties have a user defined value assigned to them -func (c *config) validateValuesDefined() error { - for k := range c.schema.Properties { - if _, ok := c.values[k]; ok { - continue - } - return fmt.Errorf("no value has been assigned to input parameter %s", k) - } - return nil -} - -// Validates the types of all input properties values match their types defined in the schema -func (c *config) validateValuesType() error { - for k, v := range c.values { - fieldInfo, ok := c.schema.Properties[k] - if !ok { - return fmt.Errorf("%s is not defined as an input parameter for the template", k) - } - err := validateType(v, fieldInfo.Type) - if err != nil { - return fmt.Errorf("incorrect type for %s. %w", k, err) - } + // All properties in the JSON schema should have a value defined. + c.schema.Required = maps.Keys(c.schema.Properties) + if err := c.schema.ValidateInstance(c.values); err != nil { + return fmt.Errorf("validation for template input parameters failed. 
%w", err) } return nil } diff --git a/libs/template/config_test.go b/libs/template/config_test.go index 335242467..bba22c758 100644 --- a/libs/template/config_test.go +++ b/libs/template/config_test.go @@ -1,7 +1,7 @@ package template import ( - "encoding/json" + "context" "testing" "github.com/databricks/cli/libs/jsonschema" @@ -9,36 +9,14 @@ import ( "github.com/stretchr/testify/require" ) -func testSchema(t *testing.T) *jsonschema.Schema { - schemaJson := `{ - "properties": { - "int_val": { - "type": "integer", - "default": 123 - }, - "float_val": { - "type": "number" - }, - "bool_val": { - "type": "boolean" - }, - "string_val": { - "type": "string", - "default": "abc" - } - } - }` - var jsonSchema jsonschema.Schema - err := json.Unmarshal([]byte(schemaJson), &jsonSchema) +func testConfig(t *testing.T) *config { + c, err := newConfig(context.Background(), "./testdata/config-test-schema/test-schema.json") require.NoError(t, err) - return &jsonSchema + return c } func TestTemplateConfigAssignValuesFromFile(t *testing.T) { - c := config{ - schema: testSchema(t), - values: make(map[string]any), - } + c := testConfig(t) err := c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") assert.NoError(t, err) @@ -49,32 +27,17 @@ func TestTemplateConfigAssignValuesFromFile(t *testing.T) { assert.Equal(t, "hello", c.values["string_val"]) } -func TestTemplateConfigAssignValuesFromFileForUnknownField(t *testing.T) { - c := config{ - schema: testSchema(t), - values: make(map[string]any), - } - - err := c.assignValuesFromFile("./testdata/config-assign-from-file-unknown-property/config.json") - assert.EqualError(t, err, "unknown_prop is not defined as an input parameter for the template") -} - func TestTemplateConfigAssignValuesFromFileForInvalidIntegerValue(t *testing.T) { - c := config{ - schema: testSchema(t), - values: make(map[string]any), - } + c := testConfig(t) err := c.assignValuesFromFile("./testdata/config-assign-from-file-invalid-int/config.json") - 
assert.EqualError(t, err, "failed to cast value abc of property int_val from file ./testdata/config-assign-from-file-invalid-int/config.json to an integer: cannot convert \"abc\" to an integer") + assert.EqualError(t, err, "failed to load config from file ./testdata/config-assign-from-file-invalid-int/config.json: failed to parse property int_val: cannot convert \"abc\" to an integer") } func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *testing.T) { - c := config{ - schema: testSchema(t), - values: map[string]any{ - "string_val": "this-is-not-overwritten", - }, + c := testConfig(t) + c.values = map[string]any{ + "string_val": "this-is-not-overwritten", } err := c.assignValuesFromFile("./testdata/config-assign-from-file/config.json") @@ -87,10 +50,7 @@ func TestTemplateConfigAssignValuesFromFileDoesNotOverwriteExistingConfigs(t *te } func TestTemplateConfigAssignDefaultValues(t *testing.T) { - c := config{ - schema: testSchema(t), - values: make(map[string]any), - } + c := testConfig(t) err := c.assignDefaultValues() assert.NoError(t, err) @@ -101,65 +61,55 @@ func TestTemplateConfigAssignDefaultValues(t *testing.T) { } func TestTemplateConfigValidateValuesDefined(t *testing.T) { - c := config{ - schema: testSchema(t), - values: map[string]any{ - "int_val": 1, - "float_val": 1.0, - "bool_val": false, - }, + c := testConfig(t) + c.values = map[string]any{ + "int_val": 1, + "float_val": 1.0, + "bool_val": false, } - err := c.validateValuesDefined() - assert.EqualError(t, err, "no value has been assigned to input parameter string_val") + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. 
no value provided for required property string_val") } func TestTemplateConfigValidateTypeForValidConfig(t *testing.T) { - c := &config{ - schema: testSchema(t), - values: map[string]any{ - "int_val": 1, - "float_val": 1.1, - "bool_val": true, - "string_val": "abcd", - }, + c := testConfig(t) + c.values = map[string]any{ + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", } - err := c.validateValuesType() - assert.NoError(t, err) - - err = c.validate() + err := c.validate() assert.NoError(t, err) } func TestTemplateConfigValidateTypeForUnknownField(t *testing.T) { - c := &config{ - schema: testSchema(t), - values: map[string]any{ - "unknown_prop": 1, - }, + c := testConfig(t) + c.values = map[string]any{ + "unknown_prop": 1, + "int_val": 1, + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", } - err := c.validateValuesType() - assert.EqualError(t, err, "unknown_prop is not defined as an input parameter for the template") + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. property unknown_prop is not defined in the schema") } func TestTemplateConfigValidateTypeForInvalidType(t *testing.T) { - c := &config{ - schema: testSchema(t), - values: map[string]any{ - "int_val": "this-should-be-an-int", - "float_val": 1.1, - "bool_val": true, - "string_val": "abcd", - }, + c := testConfig(t) + c.values = map[string]any{ + "int_val": "this-should-be-an-int", + "float_val": 1.1, + "bool_val": true, + "string_val": "abcd", } - err := c.validateValuesType() - assert.EqualError(t, err, `incorrect type for int_val. expected type integer, but value is "this-should-be-an-int"`) - - err = c.validate() - assert.EqualError(t, err, `incorrect type for int_val. expected type integer, but value is "this-should-be-an-int"`) + err := c.validate() + assert.EqualError(t, err, "validation for template input parameters failed. 
incorrect type for property int_val: expected type integer, but value is \"this-should-be-an-int\"") } func TestTemplateValidateSchema(t *testing.T) { diff --git a/libs/template/testdata/config-test-schema/test-schema.json b/libs/template/testdata/config-test-schema/test-schema.json new file mode 100644 index 000000000..41eb82519 --- /dev/null +++ b/libs/template/testdata/config-test-schema/test-schema.json @@ -0,0 +1,18 @@ +{ + "properties": { + "int_val": { + "type": "integer", + "default": 123 + }, + "float_val": { + "type": "number" + }, + "bool_val": { + "type": "boolean" + }, + "string_val": { + "type": "string", + "default": "abc" + } + } +} From 5a14c7cb433a0b23c1703f56f68b5e65e0717714 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 7 Sep 2023 22:02:26 +0200 Subject: [PATCH 112/139] Generate unique name for a job in Python wheel test (#745) ## Changes Generate unique name for a job in Python wheel test --- .../bundles/python_wheel_task/databricks_template_schema.json | 4 ++++ .../bundles/python_wheel_task/template/databricks.yml.tmpl | 2 +- internal/bundle/python_wheel_test.go | 2 ++ 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json index b39a628c1..f7f4b6342 100644 --- a/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json +++ b/internal/bundle/bundles/python_wheel_task/databricks_template_schema.json @@ -12,6 +12,10 @@ "node_type_id": { "type": "string", "description": "Node type id for job cluster" + }, + "unique_id": { + "type": "string", + "description": "Unique ID for job name" } } } diff --git a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl index 4386879a4..a3201e03f 100644 --- a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl +++ 
b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl @@ -4,7 +4,7 @@ bundle: resources: jobs: some_other_job: - name: "[${bundle.target}] Test Wheel Job" + name: "[${bundle.target}] Test Wheel Job {{.unique_id}}" tasks: - task_key: TestTask new_cluster: diff --git a/internal/bundle/python_wheel_test.go b/internal/bundle/python_wheel_test.go index 52683edcc..ee5d897d6 100644 --- a/internal/bundle/python_wheel_test.go +++ b/internal/bundle/python_wheel_test.go @@ -4,6 +4,7 @@ import ( "testing" "github.com/databricks/cli/internal" + "github.com/google/uuid" "github.com/stretchr/testify/require" ) @@ -22,6 +23,7 @@ func TestAccPythonWheelTaskDeployAndRun(t *testing.T) { bundleRoot, err := initTestTemplate(t, "python_wheel_task", map[string]any{ "node_type_id": nodeTypeId, + "unique_id": uuid.New().String(), "spark_version": "13.2.x-snapshot-scala2.12", }) require.NoError(t, err) From 50eaf16307bae42a08edb506a9b9430de3eb0f1b Mon Sep 17 00:00:00 2001 From: Arpit Jasapara <87999496+arpitjasa-db@users.noreply.github.com> Date: Thu, 7 Sep 2023 14:54:31 -0700 Subject: [PATCH 113/139] Support Model Serving Endpoints in bundles (#682) ## Changes Add Model Serving Endpoints to Databricks Bundles ## Tests Unit tests and manual testing via https://github.com/databricks/bundle-examples-internal/pull/76 Screenshot 2023-08-28 at 7 46 23 PM Screenshot 2023-08-28 at 7 47 01 PM Signed-off-by: Arpit Jasapara --- bundle/config/mutator/process_target_mode.go | 6 ++ .../mutator/process_target_mode_test.go | 8 ++ bundle/config/resources.go | 21 ++++- .../resources/model_serving_endpoint.go | 24 ++++++ bundle/deploy/terraform/convert.go | 19 +++++ bundle/deploy/terraform/convert_test.go | 74 +++++++++++++++++ bundle/deploy/terraform/interpolate.go | 3 + bundle/schema/docs/bundle_descriptions.json | 81 +++++++++++++++++++ bundle/schema/openapi.go | 26 +++++- .../model_serving_endpoint/databricks.yml | 38 +++++++++ bundle/tests/model_serving_endpoint_test.go | 48 
+++++++++++ 11 files changed, 342 insertions(+), 6 deletions(-) create mode 100644 bundle/config/resources/model_serving_endpoint.go create mode 100644 bundle/tests/model_serving_endpoint/databricks.yml create mode 100644 bundle/tests/model_serving_endpoint_test.go diff --git a/bundle/config/mutator/process_target_mode.go b/bundle/config/mutator/process_target_mode.go index 06ae7b858..93149ad04 100644 --- a/bundle/config/mutator/process_target_mode.go +++ b/bundle/config/mutator/process_target_mode.go @@ -77,6 +77,12 @@ func transformDevelopmentMode(b *bundle.Bundle) error { r.Experiments[i].Tags = append(r.Experiments[i].Tags, ml.ExperimentTag{Key: "dev", Value: b.Config.Workspace.CurrentUser.DisplayName}) } + for i := range r.ModelServingEndpoints { + prefix = "dev_" + b.Config.Workspace.CurrentUser.ShortName + "_" + r.ModelServingEndpoints[i].Name = prefix + r.ModelServingEndpoints[i].Name + // (model serving doesn't yet support tags) + } + return nil } diff --git a/bundle/config/mutator/process_target_mode_test.go b/bundle/config/mutator/process_target_mode_test.go index 489632e17..4ea33c70b 100644 --- a/bundle/config/mutator/process_target_mode_test.go +++ b/bundle/config/mutator/process_target_mode_test.go @@ -13,6 +13,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/databricks/databricks-sdk-go/service/serving" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -53,6 +54,9 @@ func mockBundle(mode config.Mode) *bundle.Bundle { Models: map[string]*resources.MlflowModel{ "model1": {Model: &ml.Model{Name: "model1"}}, }, + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "servingendpoint1": {CreateServingEndpoint: &serving.CreateServingEndpoint{Name: "servingendpoint1"}}, + }, }, }, } @@ -69,6 +73,7 @@ func TestProcessTargetModeDevelopment(t *testing.T) { assert.Equal(t, 
"/Users/lennart.kats@databricks.com/[dev lennart] experiment1", bundle.Config.Resources.Experiments["experiment1"].Name) assert.Equal(t, "[dev lennart] experiment2", bundle.Config.Resources.Experiments["experiment2"].Name) assert.Equal(t, "[dev lennart] model1", bundle.Config.Resources.Models["model1"].Name) + assert.Equal(t, "dev_lennart_servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) assert.Equal(t, "dev", bundle.Config.Resources.Experiments["experiment1"].Experiment.Tags[0].Key) assert.True(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) } @@ -82,6 +87,7 @@ func TestProcessTargetModeDefault(t *testing.T) { assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) } func TestProcessTargetModeProduction(t *testing.T) { @@ -109,6 +115,7 @@ func TestProcessTargetModeProduction(t *testing.T) { bundle.Config.Resources.Experiments["experiment1"].Permissions = permissions bundle.Config.Resources.Experiments["experiment2"].Permissions = permissions bundle.Config.Resources.Models["model1"].Permissions = permissions + bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Permissions = permissions err = validateProductionMode(context.Background(), bundle, false) require.NoError(t, err) @@ -116,6 +123,7 @@ func TestProcessTargetModeProduction(t *testing.T) { assert.Equal(t, "job1", bundle.Config.Resources.Jobs["job1"].Name) assert.Equal(t, "pipeline1", bundle.Config.Resources.Pipelines["pipeline1"].Name) assert.False(t, bundle.Config.Resources.Pipelines["pipeline1"].PipelineSpec.Development) + assert.Equal(t, "servingendpoint1", bundle.Config.Resources.ModelServingEndpoints["servingendpoint1"].Name) } func 
TestProcessTargetModeProductionOkForPrincipal(t *testing.T) { diff --git a/bundle/config/resources.go b/bundle/config/resources.go index 5d47b918c..c239b510b 100644 --- a/bundle/config/resources.go +++ b/bundle/config/resources.go @@ -11,8 +11,9 @@ type Resources struct { Jobs map[string]*resources.Job `json:"jobs,omitempty"` Pipelines map[string]*resources.Pipeline `json:"pipelines,omitempty"` - Models map[string]*resources.MlflowModel `json:"models,omitempty"` - Experiments map[string]*resources.MlflowExperiment `json:"experiments,omitempty"` + Models map[string]*resources.MlflowModel `json:"models,omitempty"` + Experiments map[string]*resources.MlflowExperiment `json:"experiments,omitempty"` + ModelServingEndpoints map[string]*resources.ModelServingEndpoint `json:"model_serving_endpoints,omitempty"` } type UniqueResourceIdTracker struct { @@ -93,6 +94,19 @@ func (r *Resources) VerifyUniqueResourceIdentifiers() (*UniqueResourceIdTracker, tracker.Type[k] = "mlflow_experiment" tracker.ConfigPath[k] = r.Experiments[k].ConfigFilePath } + for k := range r.ModelServingEndpoints { + if _, ok := tracker.Type[k]; ok { + return tracker, fmt.Errorf("multiple resources named %s (%s at %s, %s at %s)", + k, + tracker.Type[k], + tracker.ConfigPath[k], + "model_serving_endpoint", + r.ModelServingEndpoints[k].ConfigFilePath, + ) + } + tracker.Type[k] = "model_serving_endpoint" + tracker.ConfigPath[k] = r.ModelServingEndpoints[k].ConfigFilePath + } return tracker, nil } @@ -112,6 +126,9 @@ func (r *Resources) SetConfigFilePath(path string) { for _, e := range r.Experiments { e.ConfigFilePath = path } + for _, e := range r.ModelServingEndpoints { + e.ConfigFilePath = path + } } // MergeJobClusters iterates over all jobs and merges their job clusters. 
diff --git a/bundle/config/resources/model_serving_endpoint.go b/bundle/config/resources/model_serving_endpoint.go new file mode 100644 index 000000000..dccecaa6f --- /dev/null +++ b/bundle/config/resources/model_serving_endpoint.go @@ -0,0 +1,24 @@ +package resources + +import ( + "github.com/databricks/cli/bundle/config/paths" + "github.com/databricks/databricks-sdk-go/service/serving" +) + +type ModelServingEndpoint struct { + // This represents the input args for terraform, and will get converted + // to a HCL representation for CRUD + *serving.CreateServingEndpoint + + // This represents the id (ie serving_endpoint_id) that can be used + // as a reference in other resources. This value is returned by terraform. + ID string + + // Local path where the bundle is defined. All bundle resources include + // this for interpolation purposes. + paths.Paths + + // This is a resource agnostic implementation of permissions for ACLs. + // Implementation could be different based on the resource type. + Permissions []Permission `json:"permissions,omitempty"` +} diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index cd480c898..0956ea7bb 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -161,6 +161,19 @@ func BundleToTerraform(config *config.Root) (*schema.Root, bool) { } } + for k, src := range config.Resources.ModelServingEndpoints { + noResources = false + var dst schema.ResourceModelServing + conv(src, &dst) + tfroot.Resource.ModelServing[k] = &dst + + // Configure permissions for this resource. 
+ if rp := convPermissions(src.Permissions); rp != nil { + rp.ServingEndpointId = fmt.Sprintf("${databricks_model_serving.%s.serving_endpoint_id}", k) + tfroot.Resource.Permissions["model_serving_"+k] = rp + } + } + return tfroot, noResources } @@ -196,6 +209,12 @@ func TerraformToBundle(state *tfjson.State, config *config.Root) error { cur := config.Resources.Experiments[resource.Name] conv(tmp, &cur) config.Resources.Experiments[resource.Name] = cur + case "databricks_model_serving": + var tmp schema.ResourceModelServing + conv(resource.AttributeValues, &tmp) + cur := config.Resources.ModelServingEndpoints[resource.Name] + conv(tmp, &cur) + config.Resources.ModelServingEndpoints[resource.Name] = cur case "databricks_permissions": // Ignore; no need to pull these back into the configuration. default: diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index 34a65d70d..ad6266066 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/databricks/databricks-sdk-go/service/ml" "github.com/databricks/databricks-sdk-go/service/pipelines" + "github.com/databricks/databricks-sdk-go/service/serving" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -292,3 +293,76 @@ func TestConvertExperimentPermissions(t *testing.T) { assert.Equal(t, "CAN_READ", p.PermissionLevel) } + +func TestConvertModelServing(t *testing.T) { + var src = resources.ModelServingEndpoint{ + CreateServingEndpoint: &serving.CreateServingEndpoint{ + Name: "name", + Config: serving.EndpointCoreConfigInput{ + ServedModels: []serving.ServedModelInput{ + { + ModelName: "model_name", + ModelVersion: "1", + ScaleToZeroEnabled: true, + WorkloadSize: "Small", + }, + }, + TrafficConfig: &serving.TrafficConfig{ + Routes: []serving.Route{ + { + ServedModelName: "model_name-1", + TrafficPercentage: 100, + }, + }, + 
}, + }, + }, + } + + var config = config.Root{ + Resources: config.Resources{ + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "my_model_serving_endpoint": &src, + }, + }, + } + + out, _ := BundleToTerraform(&config) + resource := out.Resource.ModelServing["my_model_serving_endpoint"] + assert.Equal(t, "name", resource.Name) + assert.Equal(t, "model_name", resource.Config.ServedModels[0].ModelName) + assert.Equal(t, "1", resource.Config.ServedModels[0].ModelVersion) + assert.Equal(t, true, resource.Config.ServedModels[0].ScaleToZeroEnabled) + assert.Equal(t, "Small", resource.Config.ServedModels[0].WorkloadSize) + assert.Equal(t, "model_name-1", resource.Config.TrafficConfig.Routes[0].ServedModelName) + assert.Equal(t, 100, resource.Config.TrafficConfig.Routes[0].TrafficPercentage) + assert.Nil(t, out.Data) +} + +func TestConvertModelServingPermissions(t *testing.T) { + var src = resources.ModelServingEndpoint{ + Permissions: []resources.Permission{ + { + Level: "CAN_VIEW", + UserName: "jane@doe.com", + }, + }, + } + + var config = config.Root{ + Resources: config.Resources{ + ModelServingEndpoints: map[string]*resources.ModelServingEndpoint{ + "my_model_serving_endpoint": &src, + }, + }, + } + + out, _ := BundleToTerraform(&config) + assert.NotEmpty(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].ServingEndpointId) + assert.Len(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].AccessControl, 1) + + p := out.Resource.Permissions["model_serving_my_model_serving_endpoint"].AccessControl[0] + assert.Equal(t, "jane@doe.com", p.UserName) + assert.Equal(t, "CAN_VIEW", p.PermissionLevel) + +} diff --git a/bundle/deploy/terraform/interpolate.go b/bundle/deploy/terraform/interpolate.go index dd1dcbb88..ea3c99aa1 100644 --- a/bundle/deploy/terraform/interpolate.go +++ b/bundle/deploy/terraform/interpolate.go @@ -25,6 +25,9 @@ func interpolateTerraformResourceIdentifiers(path string, lookup map[string]stri case 
"experiments": path = strings.Join(append([]string{"databricks_mlflow_experiment"}, parts[2:]...), interpolation.Delimiter) return fmt.Sprintf("${%s}", path), nil + case "model_serving_endpoints": + path = strings.Join(append([]string{"databricks_model_serving"}, parts[2:]...), interpolation.Delimiter) + return fmt.Sprintf("${%s}", path), nil default: panic("TODO: " + parts[1]) } diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index 84f0492fb..ffdb56298 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -1441,6 +1441,87 @@ } } }, + "model_serving_endpoints": { + "description": "List of Model Serving Endpoints", + "additionalproperties": { + "description": "", + "properties": { + "name": { + "description": "The name of the model serving endpoint. This field is required and must be unique across a workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores. NOTE: Changing this name will delete the existing endpoint and create a new endpoint with the update name." + }, + "permissions": { + "description": "", + "items": { + "description": "", + "properties": { + "group_name": { + "description": "" + }, + "level": { + "description": "" + }, + "service_principal_name": { + "description": "" + }, + "user_name": { + "description": "" + } + } + } + }, + "config": { + "description": "The model serving endpoint configuration.", + "properties": { + "description": "", + "properties": { + "served_models": { + "description": "Each block represents a served model for the endpoint to serve. A model serving endpoint can have up to 10 served models.", + "items": { + "description": "", + "properties" : { + "name": { + "description": "The name of a served model. It must be unique across an endpoint. If not specified, this field will default to modelname-modelversion. 
A served model name can consist of alphanumeric characters, dashes, and underscores." + }, + "model_name": { + "description": "The name of the model in Databricks Model Registry to be served." + }, + "model_version": { + "description": "The version of the model in Databricks Model Registry to be served." + }, + "workload_size": { + "description": "The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are \"Small\" (4 - 4 provisioned concurrency), \"Medium\" (8 - 16 provisioned concurrency), and \"Large\" (16 - 64 provisioned concurrency)." + }, + "scale_to_zero_enabled": { + "description": "Whether the compute resources for the served model should scale down to zero. If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0." + } + } + } + }, + "traffic_config": { + "description": "A single block represents the traffic split configuration amongst the served models.", + "properties": { + "routes": { + "description": "Each block represents a route that defines traffic to each served model. Each served_models block needs to have a corresponding routes block.", + "items": { + "description": "", + "properties": { + "served_model_name": { + "description": "The name of the served model this route configures traffic for. This needs to match the name of a served_models block." + }, + "traffic_percentage": { + "description": "The percentage of endpoint traffic to send to this route. It must be an integer between 0 and 100 inclusive." 
+ } + } + } + } + } + } + } + } + } + } + } + }, "pipelines": { "description": "List of DLT pipelines", "additionalproperties": { diff --git a/bundle/schema/openapi.go b/bundle/schema/openapi.go index b0d676576..1a8b76ed9 100644 --- a/bundle/schema/openapi.go +++ b/bundle/schema/openapi.go @@ -210,6 +210,19 @@ func (reader *OpenapiReader) modelsDocs() (*Docs, error) { return modelsDocs, nil } +func (reader *OpenapiReader) modelServingEndpointsDocs() (*Docs, error) { + modelServingEndpointsSpecSchema, err := reader.readResolvedSchema(SchemaPathPrefix + "serving.CreateServingEndpoint") + if err != nil { + return nil, err + } + modelServingEndpointsDocs := schemaToDocs(modelServingEndpointsSpecSchema) + modelServingEndpointsAllDocs := &Docs{ + Description: "List of Model Serving Endpoints", + AdditionalProperties: modelServingEndpointsDocs, + } + return modelServingEndpointsAllDocs, nil +} + func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) { jobsDocs, err := reader.jobsDocs() if err != nil { @@ -227,14 +240,19 @@ func (reader *OpenapiReader) ResourcesDocs() (*Docs, error) { if err != nil { return nil, err } + modelServingEndpointsDocs, err := reader.modelServingEndpointsDocs() + if err != nil { + return nil, err + } return &Docs{ Description: "Collection of Databricks resources to deploy.", Properties: map[string]*Docs{ - "jobs": jobsDocs, - "pipelines": pipelinesDocs, - "experiments": experimentsDocs, - "models": modelsDocs, + "jobs": jobsDocs, + "pipelines": pipelinesDocs, + "experiments": experimentsDocs, + "models": modelsDocs, + "model_serving_endpoints": modelServingEndpointsDocs, }, }, nil } diff --git a/bundle/tests/model_serving_endpoint/databricks.yml b/bundle/tests/model_serving_endpoint/databricks.yml new file mode 100644 index 000000000..e4fb54a1f --- /dev/null +++ b/bundle/tests/model_serving_endpoint/databricks.yml @@ -0,0 +1,38 @@ +resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-endpoint" + config: + 
served_models: + - model_name: "model-name" + model_version: "1" + workload_size: "Small" + scale_to_zero_enabled: true + traffic_config: + routes: + - served_model_name: "model-name-1" + traffic_percentage: 100 + permissions: + - level: CAN_QUERY + group_name: users + +targets: + development: + mode: development + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-dev-endpoint" + + staging: + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-staging-endpoint" + + production: + mode: production + resources: + model_serving_endpoints: + my_model_serving_endpoint: + name: "my-prod-endpoint" diff --git a/bundle/tests/model_serving_endpoint_test.go b/bundle/tests/model_serving_endpoint_test.go new file mode 100644 index 000000000..bfa1a31b4 --- /dev/null +++ b/bundle/tests/model_serving_endpoint_test.go @@ -0,0 +1,48 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/config/resources" + "github.com/stretchr/testify/assert" +) + +func assertExpected(t *testing.T, p *resources.ModelServingEndpoint) { + assert.Equal(t, "model_serving_endpoint/databricks.yml", filepath.ToSlash(p.ConfigFilePath)) + assert.Equal(t, "model-name", p.Config.ServedModels[0].ModelName) + assert.Equal(t, "1", p.Config.ServedModels[0].ModelVersion) + assert.Equal(t, "model-name-1", p.Config.TrafficConfig.Routes[0].ServedModelName) + assert.Equal(t, 100, p.Config.TrafficConfig.Routes[0].TrafficPercentage) + assert.Equal(t, "users", p.Permissions[0].GroupName) + assert.Equal(t, "CAN_QUERY", p.Permissions[0].Level) +} + +func TestModelServingEndpointDevelopment(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "development") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + assert.Equal(t, b.Config.Bundle.Mode, config.Development) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + 
assert.Equal(t, "my-dev-endpoint", p.Name) + assertExpected(t, p) +} + +func TestModelServingEndpointStaging(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "staging") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + assert.Equal(t, "my-staging-endpoint", p.Name) + assertExpected(t, p) +} + +func TestModelServingEndpointProduction(t *testing.T) { + b := loadTarget(t, "./model_serving_endpoint", "production") + assert.Len(t, b.Config.Resources.ModelServingEndpoints, 1) + + p := b.Config.Resources.ModelServingEndpoints["my_model_serving_endpoint"] + assert.Equal(t, "my-prod-endpoint", p.Name) + assertExpected(t, p) +} From 17d9f7dd2a340e485ac4c783f59e5909d9cbd76e Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 11:19:55 +0200 Subject: [PATCH 114/139] Use unique bundle root path for Python E2E test (#748) ## Changes It helps to make sure jobs in the tests are deployed and executed uniquely and isolated ``` Bundle remote directory is /Users/61b77d30-bc10-4214-9650-29cf5db0e941/.bundle/4b630810-5edc-4d8f-85d1-0eb5baf7bb28 Deleted snapshot file at /var/folders/nt/xjv68qzs45319w4k36dhpylc0000gp/T/TestAccPythonWheelTaskDeployAndRun3933198431/001/.databricks/bundle/default/sync-snapshots/dd9db100465e3d91.json Successfully deleted files! --- PASS: TestAccPythonWheelTaskDeployAndRun (346.28s) PASS coverage: 93.5% of statements in ./... ok github.com/databricks/cli/internal/bundle 346.976s coverage: 93.5% of statements in ./... 
``` --- .../bundles/python_wheel_task/template/databricks.yml.tmpl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl index a3201e03f..e715cdf1e 100644 --- a/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl +++ b/internal/bundle/bundles/python_wheel_task/template/databricks.yml.tmpl @@ -1,6 +1,9 @@ bundle: name: wheel-task +workspace: + root_path: "~/.bundle/{{.unique_id}}" + resources: jobs: some_other_job: From e08f419ef68faad79d1976181ff517c63fc8fe0a Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 11:52:45 +0200 Subject: [PATCH 115/139] Do not include empty output in job run output (#749) ## Changes Do not include empty output in job run output ## Tests Running a job from CLI, the result: ``` andrew.nester@HFW9Y94129 wheel % databricks bundle run some_other_job --output json Run URL: https://***/?o=6051921418418893#job/780620378804085/run/386695528477456 2023-09-08 11:33:24 "[default] My Wheel Job" TERMINATED SUCCESS { "task_outputs": [ { "TaskKey": "TestTask", "Output": { "result": "Hello from my func\nGot arguments v2:\n['python']\n" }, "EndTime": 1694165597474 } ] ``` --- bundle/run/output/job.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/bundle/run/output/job.go b/bundle/run/output/job.go index 4bea4c7ad..6199ac2f7 100644 --- a/bundle/run/output/job.go +++ b/bundle/run/output/job.go @@ -60,7 +60,7 @@ func GetJobOutput(ctx context.Context, w *databricks.WorkspaceClient, runId int6 return nil, err } result := &JobOutput{ - TaskOutputs: make([]TaskOutput, len(jobRun.Tasks)), + TaskOutputs: make([]TaskOutput, 0), } for _, task := range jobRun.Tasks { jobRunOutput, err := w.Jobs.GetRunOutput(ctx, jobs.GetRunOutputRequest{ @@ -69,7 +69,11 @@ func GetJobOutput(ctx context.Context, w *databricks.WorkspaceClient, runId int6 if err != nil { return nil, 
err } - task := TaskOutput{TaskKey: task.TaskKey, Output: toRunOutput(jobRunOutput), EndTime: task.EndTime} + out := toRunOutput(jobRunOutput) + if out == nil { + continue + } + task := TaskOutput{TaskKey: task.TaskKey, Output: out, EndTime: task.EndTime} result.TaskOutputs = append(result.TaskOutputs, task) } return result, nil From e64463ba47c9b86874ad32b873103bdb003463f6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 11:53:57 +0200 Subject: [PATCH 116/139] Fixed marking libraries from DBFS as remote (#750) ## Changes Fixed marking libraries from DBFS as remote ## Tests Updated unit tests to catch the regression --- bundle/config/mutator/translate_paths_test.go | 4 ++-- bundle/libraries/libraries.go | 4 ++-- bundle/libraries/libraries_test.go | 1 + bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml | 2 +- 4 files changed, 6 insertions(+), 5 deletions(-) diff --git a/bundle/config/mutator/translate_paths_test.go b/bundle/config/mutator/translate_paths_test.go index e7ac5e8af..f7edee30a 100644 --- a/bundle/config/mutator/translate_paths_test.go +++ b/bundle/config/mutator/translate_paths_test.go @@ -162,7 +162,7 @@ func TestTranslatePaths(t *testing.T) { MainClassName: "HelloWorldRemote", }, Libraries: []compute.Library{ - {Jar: "dbfs:///bundle/dist/task_remote.jar"}, + {Jar: "dbfs:/bundle/dist/task_remote.jar"}, }, }, }, @@ -243,7 +243,7 @@ func TestTranslatePaths(t *testing.T) { ) assert.Equal( t, - "dbfs:///bundle/dist/task_remote.jar", + "dbfs:/bundle/dist/task_remote.jar", bundle.Config.Resources.Jobs["job"].Tasks[6].Libraries[0].Jar, ) diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index d26768f95..076180f46 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -165,8 +165,8 @@ func isRemoteStorageScheme(path string) bool { return false } - // If the path starts with scheme:// format, it's a correct remote storage scheme - return strings.HasPrefix(path, url.Scheme+"://") + // If the 
path starts with scheme:/ format, it's a correct remote storage scheme + return strings.HasPrefix(path, url.Scheme+":/") } diff --git a/bundle/libraries/libraries_test.go b/bundle/libraries/libraries_test.go index 050efe749..7ff1609ab 100644 --- a/bundle/libraries/libraries_test.go +++ b/bundle/libraries/libraries_test.go @@ -16,6 +16,7 @@ var testCases map[string]bool = map[string]bool{ "file://path/to/package": true, "C:\\path\\to\\package": true, "dbfs://path/to/package": false, + "dbfs:/path/to/package": false, "s3://path/to/package": false, "abfss://path/to/package": false, } diff --git a/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml index 54577d658..07f4957bb 100644 --- a/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml +++ b/bundle/tests/bundle/python_wheel_dbfs_lib/bundle.yml @@ -12,4 +12,4 @@ resources: package_name: "my_test_code" entry_point: "run" libraries: - - whl: dbfs://path/to/dist/mywheel.whl + - whl: dbfs:/path/to/dist/mywheel.whl From f7566b82648ab317a7ad6e875eb5eac40f09fcd6 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 12:47:17 +0200 Subject: [PATCH 117/139] Close local Terraform state file when pushing to remote (#752) ## Changes Close local Terraform state file when pushing to remote Should help fix E2E test cleanup ``` testing.go:1225: TempDir RemoveAll cleanup: remove C:\Users\RUNNER~1\AppData\Local\Temp\TestAccPythonWheelTaskDeployAndRun1395546390\001\.databricks\bundle\default\terraform\terraform.tfstate: The process cannot access the file because it is being used by another process. 
``` --- bundle/deploy/terraform/state_push.go | 1 + 1 file changed, 1 insertion(+) diff --git a/bundle/deploy/terraform/state_push.go b/bundle/deploy/terraform/state_push.go index 0b4c5dbfa..0cd69e522 100644 --- a/bundle/deploy/terraform/state_push.go +++ b/bundle/deploy/terraform/state_push.go @@ -32,6 +32,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { if err != nil { return err } + defer local.Close() // Upload state file from local cache directory to filer. log.Infof(ctx, "Writing local state file to remote state directory") From 67af171a68209b49c9d20e2e9cbdcf22500eebc3 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 13:08:21 +0200 Subject: [PATCH 118/139] Process only Python wheel tasks which have local libraries used (#751) ## Changes Process only Python wheel tasks which have local libraries used ## Tests Updated uni test to catch the regression --- bundle/artifacts/whl/autodetect.go | 4 ++-- bundle/artifacts/whl/from_libraries.go | 2 +- bundle/libraries/libraries.go | 14 ++++++++++++-- bundle/python/transform.go | 5 +++-- bundle/python/transform_test.go | 11 +++++++++++ 5 files changed, 29 insertions(+), 7 deletions(-) diff --git a/bundle/artifacts/whl/autodetect.go b/bundle/artifacts/whl/autodetect.go index 41d80bb76..29031e86d 100644 --- a/bundle/artifacts/whl/autodetect.go +++ b/bundle/artifacts/whl/autodetect.go @@ -27,9 +27,9 @@ func (m *detectPkg) Name() string { } func (m *detectPkg) Apply(ctx context.Context, b *bundle.Bundle) error { - wheelTasks := libraries.FindAllWheelTasks(b) + wheelTasks := libraries.FindAllWheelTasksWithLocalLibraries(b) if len(wheelTasks) == 0 { - log.Infof(ctx, "No wheel tasks in databricks.yml config, skipping auto detect") + log.Infof(ctx, "No local wheel tasks in databricks.yml config, skipping auto detect") return nil } cmdio.LogString(ctx, "artifacts.whl.AutoDetect: Detecting Python wheel project...") diff --git a/bundle/artifacts/whl/from_libraries.go 
b/bundle/artifacts/whl/from_libraries.go index 855e5b943..9d35f6314 100644 --- a/bundle/artifacts/whl/from_libraries.go +++ b/bundle/artifacts/whl/from_libraries.go @@ -26,7 +26,7 @@ func (*fromLibraries) Apply(ctx context.Context, b *bundle.Bundle) error { return nil } - tasks := libraries.FindAllWheelTasks(b) + tasks := libraries.FindAllWheelTasksWithLocalLibraries(b) for _, task := range tasks { for _, lib := range task.Libraries { matches, err := filepath.Glob(filepath.Join(b.Config.Path, lib.Whl)) diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index 076180f46..d9a257db8 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -56,11 +56,11 @@ func findAllTasks(b *bundle.Bundle) []*jobs.Task { return result } -func FindAllWheelTasks(b *bundle.Bundle) []*jobs.Task { +func FindAllWheelTasksWithLocalLibraries(b *bundle.Bundle) []*jobs.Task { tasks := findAllTasks(b) wheelTasks := make([]*jobs.Task, 0) for _, task := range tasks { - if task.PythonWheelTask != nil { + if task.PythonWheelTask != nil && IsTaskWithLocalLibraries(task) { wheelTasks = append(wheelTasks, task) } } @@ -68,6 +68,16 @@ func FindAllWheelTasks(b *bundle.Bundle) []*jobs.Task { return wheelTasks } +func IsTaskWithLocalLibraries(task *jobs.Task) bool { + for _, l := range task.Libraries { + if isLocalLibrary(&l) { + return true + } + } + + return false +} + func isMissingRequiredLibraries(task *jobs.Task) bool { if task.Libraries != nil { return false diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 53db450b5..3d744df9d 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -7,6 +7,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/libraries" "github.com/databricks/databricks-sdk-go/service/jobs" ) @@ -72,8 +73,8 @@ func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { for i := range tasks 
{ task := &tasks[i] - // Keep only Python wheel tasks - if task.PythonWheelTask == nil { + // Keep only Python wheel tasks with local libraries referenced + if task.PythonWheelTask == nil || !libraries.IsTaskWithLocalLibraries(task) { continue } diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index a9f57db8e..99d3129d8 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/paths" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/databricks-sdk-go/service/compute" "github.com/databricks/databricks-sdk-go/service/jobs" "github.com/stretchr/testify/require" ) @@ -82,11 +83,21 @@ func TestTransformFiltersWheelTasksOnly(t *testing.T) { { TaskKey: "key1", PythonWheelTask: &jobs.PythonWheelTask{}, + Libraries: []compute.Library{ + {Whl: "./dist/test.whl"}, + }, }, { TaskKey: "key2", NotebookTask: &jobs.NotebookTask{}, }, + { + TaskKey: "key3", + PythonWheelTask: &jobs.PythonWheelTask{}, + Libraries: []compute.Library{ + {Whl: "dbfs:/FileStore/dist/test.whl"}, + }, + }, }, }, }, From 368321d07ddb2357361f2a94e0ba7e25bc508d99 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 13:24:51 +0200 Subject: [PATCH 119/139] Close python wheel directory file descriptor after read (#753) ## Changes Close python wheel directory file descriptor after read --- python/utils.go | 2 ++ 1 file changed, 2 insertions(+) diff --git a/python/utils.go b/python/utils.go index a8408fae2..47d5462d2 100644 --- a/python/utils.go +++ b/python/utils.go @@ -30,6 +30,8 @@ func FindFilesWithSuffixInPath(dir, suffix string) []string { log.Debugf(context.Background(), "open dir %s: %s", dir, err) return nil } + defer f.Close() + entries, err := f.ReadDir(0) if err != nil { log.Debugf(context.Background(), "read dir %s: %s", dir, err) From 7c96270db8c95a54e8d59893c15e370dff1f2f76 Mon Sep 17 00:00:00 2001 
From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Fri, 8 Sep 2023 14:07:22 +0200 Subject: [PATCH 120/139] Add enum support for bundle templates (#668) ## Changes This PR includes: 1. Adding enum field to the json schema struct 2. Adding prompting logic for enum values. See demo for how it looks 3. Validation rules, validating the default value and config values when an enum list is specified This will now enable template authors to use enums for input parameters. ## Tests Manually and new unit tests --- libs/cmdio/logger.go | 31 ++++++++++ libs/cmdio/logger_test.go | 9 +++ libs/jsonschema/instance.go | 34 +++++++++-- libs/jsonschema/instance_test.go | 26 ++++++++ libs/jsonschema/schema.go | 34 +++++++++++ libs/jsonschema/schema_test.go | 60 +++++++++++++++++++ .../instance-validate/test-schema-enum.json | 12 ++++ .../schema-load-int/schema-invalid-enum.json | 10 ++++ .../schema-load-int/schema-valid.json | 3 +- libs/jsonschema/utils.go | 12 ++++ libs/jsonschema/utils_test.go | 10 ++++ libs/template/config.go | 20 ++++++- libs/template/config_test.go | 27 +++++++++ 13 files changed, 278 insertions(+), 10 deletions(-) create mode 100644 libs/jsonschema/testdata/instance-validate/test-schema-enum.json create mode 100644 libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json diff --git a/libs/cmdio/logger.go b/libs/cmdio/logger.go index 0663306e1..7d760b998 100644 --- a/libs/cmdio/logger.go +++ b/libs/cmdio/logger.go @@ -10,6 +10,7 @@ import ( "strings" "github.com/databricks/cli/libs/flags" + "github.com/manifoldco/promptui" ) // This is the interface for all io interactions with a user @@ -104,6 +105,36 @@ func AskYesOrNo(ctx context.Context, question string) (bool, error) { return false, nil } +func AskSelect(ctx context.Context, question string, choices []string) (string, error) { + logger, ok := FromContext(ctx) + if !ok { + logger = Default() + } + return logger.AskSelect(question, choices) +} + +func (l *Logger) 
AskSelect(question string, choices []string) (string, error) { + if l.Mode == flags.ModeJson { + return "", fmt.Errorf("question prompts are not supported in json mode") + } + + prompt := promptui.Select{ + Label: question, + Items: choices, + HideHelp: true, + Templates: &promptui.SelectTemplates{ + Label: "{{.}}: ", + Selected: fmt.Sprintf("%s: {{.}}", question), + }, + } + + _, ans, err := prompt.Run() + if err != nil { + return "", err + } + return ans, nil +} + func (l *Logger) Ask(question string, defaultVal string) (string, error) { if l.Mode == flags.ModeJson { return "", fmt.Errorf("question prompts are not supported in json mode") diff --git a/libs/cmdio/logger_test.go b/libs/cmdio/logger_test.go index da6190462..c5c00d022 100644 --- a/libs/cmdio/logger_test.go +++ b/libs/cmdio/logger_test.go @@ -1,6 +1,7 @@ package cmdio import ( + "context" "testing" "github.com/databricks/cli/libs/flags" @@ -12,3 +13,11 @@ func TestAskFailedInJsonMode(t *testing.T) { _, err := l.Ask("What is your spirit animal?", "") assert.ErrorContains(t, err, "question prompts are not supported in json mode") } + +func TestAskChoiceFailsInJsonMode(t *testing.T) { + l := NewLogger(flags.ModeJson) + ctx := NewContext(context.Background(), l) + + _, err := AskSelect(ctx, "what is a question?", []string{"b", "c", "a"}) + assert.EqualError(t, err, "question prompts are not supported in json mode") +} diff --git a/libs/jsonschema/instance.go b/libs/jsonschema/instance.go index 02ab9f281..229a45b53 100644 --- a/libs/jsonschema/instance.go +++ b/libs/jsonschema/instance.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "os" + "slices" ) // Load a JSON document and validate it against the JSON schema. 
Instance here @@ -39,13 +40,18 @@ func (s *Schema) LoadInstance(path string) (map[string]any, error) { } func (s *Schema) ValidateInstance(instance map[string]any) error { - if err := s.validateAdditionalProperties(instance); err != nil { - return err + for _, fn := range []func(map[string]any) error{ + s.validateAdditionalProperties, + s.validateEnum, + s.validateRequired, + s.validateTypes, + } { + err := fn(instance) + if err != nil { + return err + } } - if err := s.validateRequired(instance); err != nil { - return err - } - return s.validateTypes(instance) + return nil } // If additional properties is set to false, this function validates instance only @@ -89,3 +95,19 @@ func (s *Schema) validateTypes(instance map[string]any) error { } return nil } + +func (s *Schema) validateEnum(instance map[string]any) error { + for k, v := range instance { + fieldInfo, ok := s.Properties[k] + if !ok { + continue + } + if fieldInfo.Enum == nil { + continue + } + if !slices.Contains(fieldInfo.Enum, v) { + return fmt.Errorf("expected value of property %s to be one of %v. 
Found: %v", k, fieldInfo.Enum, v) + } + } + return nil +} diff --git a/libs/jsonschema/instance_test.go b/libs/jsonschema/instance_test.go index d5e0766dd..ffd10ca43 100644 --- a/libs/jsonschema/instance_test.go +++ b/libs/jsonschema/instance_test.go @@ -127,3 +127,29 @@ func TestLoadInstance(t *testing.T) { _, err = schema.LoadInstance("./testdata/instance-load/invalid-type-instance.json") assert.EqualError(t, err, "incorrect type for property string_val: expected type string, but value is 123") } + +func TestValidateInstanceEnum(t *testing.T) { + schema, err := Load("./testdata/instance-validate/test-schema-enum.json") + require.NoError(t, err) + + validInstance := map[string]any{ + "foo": "b", + "bar": int64(6), + } + assert.NoError(t, schema.validateEnum(validInstance)) + assert.NoError(t, schema.ValidateInstance(validInstance)) + + invalidStringInstance := map[string]any{ + "foo": "d", + "bar": int64(2), + } + assert.EqualError(t, schema.validateEnum(invalidStringInstance), "expected value of property foo to be one of [a b c]. Found: d") + assert.EqualError(t, schema.ValidateInstance(invalidStringInstance), "expected value of property foo to be one of [a b c]. Found: d") + + invalidIntInstance := map[string]any{ + "foo": "a", + "bar": int64(1), + } + assert.EqualError(t, schema.validateEnum(invalidIntInstance), "expected value of property bar to be one of [2 4 6]. Found: 1") + assert.EqualError(t, schema.ValidateInstance(invalidIntInstance), "expected value of property bar to be one of [2 4 6]. Found: 1") +} diff --git a/libs/jsonschema/schema.go b/libs/jsonschema/schema.go index 44c65ecc6..108102a64 100644 --- a/libs/jsonschema/schema.go +++ b/libs/jsonschema/schema.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "os" + "slices" ) // defines schema for a json object @@ -41,6 +42,9 @@ type Schema struct { // Default value for the property / object Default any `json:"default,omitempty"` + // List of valid values for a JSON instance for this schema. 
+ Enum []any `json:"enum,omitempty"` + // Extension embeds our custom JSON schema extensions. Extension } @@ -84,6 +88,30 @@ func (schema *Schema) validate() error { } } + // Validate enum field values for properties are consistent with types. + for name, property := range schema.Properties { + if property.Enum == nil { + continue + } + for i, enum := range property.Enum { + err := validateType(enum, property.Type) + if err != nil { + return fmt.Errorf("type validation for enum at index %v failed for property %s: %w", i, name, err) + } + } + } + + // Validate default value is contained in the list of enums if both are defined. + for name, property := range schema.Properties { + if property.Default == nil || property.Enum == nil { + continue + } + // We expect the default value to be consistent with the list of enum + // values. + if !slices.Contains(property.Enum, property.Default) { + return fmt.Errorf("list of enum values for property %s does not contain default value %v: %v", name, property.Default, property.Enum) + } + } return nil } @@ -115,6 +143,12 @@ func Load(path string) (*Schema, error) { return nil, fmt.Errorf("failed to parse default value for property %s: %w", name, err) } } + for i, enum := range property.Enum { + property.Enum[i], err = toInteger(enum) + if err != nil { + return nil, fmt.Errorf("failed to parse enum value %v at index %v for property %s: %w", enum, i, name, err) + } + } } return schema, schema.validate() diff --git a/libs/jsonschema/schema_test.go b/libs/jsonschema/schema_test.go index 5b92d8466..db559ea88 100644 --- a/libs/jsonschema/schema_test.go +++ b/libs/jsonschema/schema_test.go @@ -47,6 +47,7 @@ func TestSchemaLoadIntegers(t *testing.T) { schema, err := Load("./testdata/schema-load-int/schema-valid.json") assert.NoError(t, err) assert.Equal(t, int64(1), schema.Properties["abc"].Default) + assert.Equal(t, []any{int64(1), int64(2), int64(3)}, schema.Properties["abc"].Enum) } func TestSchemaLoadIntegersWithInvalidDefault(t 
*testing.T) { @@ -54,6 +55,11 @@ func TestSchemaLoadIntegersWithInvalidDefault(t *testing.T) { assert.EqualError(t, err, "failed to parse default value for property abc: expected integer value, got: 1.1") } +func TestSchemaLoadIntegersWithInvalidEnums(t *testing.T) { + _, err := Load("./testdata/schema-load-int/schema-invalid-enum.json") + assert.EqualError(t, err, "failed to parse enum value 2.4 at index 1 for property abc: expected integer value, got: 2.4") +} + func TestSchemaValidateDefaultType(t *testing.T) { invalidSchema := &Schema{ Properties: map[string]*Schema{ @@ -79,3 +85,57 @@ func TestSchemaValidateDefaultType(t *testing.T) { err = validSchema.validate() assert.NoError(t, err) } + +func TestSchemaValidateEnumType(t *testing.T) { + invalidSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Enum: []any{true, "false"}, + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "type validation for enum at index 1 failed for property foo: expected type boolean, but value is \"false\"") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "boolean", + Enum: []any{true, false}, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} + +func TestSchemaValidateErrorWhenDefaultValueIsNotInEnums(t *testing.T) { + invalidSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "string", + Default: "abc", + Enum: []any{"def", "ghi"}, + }, + }, + } + + err := invalidSchema.validate() + assert.EqualError(t, err, "list of enum values for property foo does not contain default value abc: [def ghi]") + + validSchema := &Schema{ + Properties: map[string]*Schema{ + "foo": { + Type: "string", + Default: "abc", + Enum: []any{"def", "ghi", "abc"}, + }, + }, + } + + err = validSchema.validate() + assert.NoError(t, err) +} diff --git a/libs/jsonschema/testdata/instance-validate/test-schema-enum.json 
b/libs/jsonschema/testdata/instance-validate/test-schema-enum.json new file mode 100644 index 000000000..75ffd6eb8 --- /dev/null +++ b/libs/jsonschema/testdata/instance-validate/test-schema-enum.json @@ -0,0 +1,12 @@ +{ + "properties": { + "foo": { + "type": "string", + "enum": ["a", "b", "c"] + }, + "bar": { + "type": "integer", + "enum": [2,4,6] + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json b/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json new file mode 100644 index 000000000..5bd2b3f2b --- /dev/null +++ b/libs/jsonschema/testdata/schema-load-int/schema-invalid-enum.json @@ -0,0 +1,10 @@ +{ + "type": "object", + "properties": { + "abc": { + "type": "integer", + "default": 1, + "enum": [1,2.4,3] + } + } +} diff --git a/libs/jsonschema/testdata/schema-load-int/schema-valid.json b/libs/jsonschema/testdata/schema-load-int/schema-valid.json index 599ac04d0..a1167a6c9 100644 --- a/libs/jsonschema/testdata/schema-load-int/schema-valid.json +++ b/libs/jsonschema/testdata/schema-load-int/schema-valid.json @@ -3,7 +3,8 @@ "properties": { "abc": { "type": "integer", - "default": 1 + "default": 1, + "enum": [1,2,3] } } } diff --git a/libs/jsonschema/utils.go b/libs/jsonschema/utils.go index 21866965e..66db9603e 100644 --- a/libs/jsonschema/utils.go +++ b/libs/jsonschema/utils.go @@ -71,6 +71,18 @@ func ToString(v any, T Type) (string, error) { } } +func ToStringSlice(arr []any, T Type) ([]string, error) { + res := []string{} + for _, v := range arr { + s, err := ToString(v, T) + if err != nil { + return nil, err + } + res = append(res, s) + } + return res, nil +} + func FromString(s string, T Type) (any, error) { if T == StringType { return s, nil diff --git a/libs/jsonschema/utils_test.go b/libs/jsonschema/utils_test.go index 9686cf39b..29529aaa9 100644 --- a/libs/jsonschema/utils_test.go +++ b/libs/jsonschema/utils_test.go @@ -118,3 +118,13 @@ func TestTemplateFromString(t *testing.T) { _, err = 
FromString("1.0", "foobar") assert.EqualError(t, err, "unknown json schema type: \"foobar\"") } + +func TestTemplateToStringSlice(t *testing.T) { + s, err := ToStringSlice([]any{"a", "b", "c"}, StringType) + assert.NoError(t, err) + assert.Equal(t, []string{"a", "b", "c"}, s) + + s, err = ToStringSlice([]any{1.1, 2.2, 3.3}, NumberType) + assert.NoError(t, err) + assert.Equal(t, []string{"1.1", "2.2", "3.3"}, s) +} diff --git a/libs/template/config.go b/libs/template/config.go index 6f980f613..21618ac9a 100644 --- a/libs/template/config.go +++ b/libs/template/config.go @@ -102,9 +102,23 @@ func (c *config) promptForValues() error { } // Get user input by running the prompt - userInput, err := cmdio.Ask(c.ctx, property.Description, defaultVal) - if err != nil { - return err + var userInput string + if property.Enum != nil { + // convert list of enums to string slice + enums, err := jsonschema.ToStringSlice(property.Enum, property.Type) + if err != nil { + return err + } + userInput, err = cmdio.AskSelect(c.ctx, property.Description, enums) + if err != nil { + return err + } + } else { + userInput, err = cmdio.Ask(c.ctx, property.Description, defaultVal) + if err != nil { + return err + } + } // Convert user input string back to a value diff --git a/libs/template/config_test.go b/libs/template/config_test.go index bba22c758..1b1fc3383 100644 --- a/libs/template/config_test.go +++ b/libs/template/config_test.go @@ -142,3 +142,30 @@ func TestTemplateValidateSchema(t *testing.T) { err = validateSchema(toSchema("array")) assert.EqualError(t, err, "property type array is not supported by bundle templates") } + +func TestTemplateEnumValidation(t *testing.T) { + schema := jsonschema.Schema{ + Properties: map[string]*jsonschema.Schema{ + "abc": { + Type: "integer", + Enum: []any{1, 2, 3, 4}, + }, + }, + } + + c := &config{ + schema: &schema, + values: map[string]any{ + "abc": 5, + }, + } + assert.EqualError(t, c.validate(), "validation for template input parameters failed. 
expected value of property abc to be one of [1 2 3 4]. Found: 5") + + c = &config{ + schema: &schema, + values: map[string]any{ + "abc": 4, + }, + } + assert.NoError(t, c.validate()) +} From 18a5b05d82ce589c4bfb26ea50c8b99d52fe3f72 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 15:45:21 +0200 Subject: [PATCH 121/139] Apply Python wheel trampoline if workspace library is used (#755) ## Changes Workspace library will be detected by trampoline in 2 cases: - User defined to use local wheel file - User defined to use remote wheel file from Workspace file system In both of these cases we should correctly apply Python trampoline ## Tests Added a regression test (also covered by Python e2e test) --- bundle/artifacts/artifacts_test.go | 2 ++ bundle/config/artifact.go | 8 ++++++-- bundle/libraries/libraries.go | 11 +++++++++++ bundle/python/transform.go | 11 +++++++++-- bundle/python/transform_test.go | 2 +- 5 files changed, 29 insertions(+), 5 deletions(-) diff --git a/bundle/artifacts/artifacts_test.go b/bundle/artifacts/artifacts_test.go index 4c0a18f38..bbae44efa 100644 --- a/bundle/artifacts/artifacts_test.go +++ b/bundle/artifacts/artifacts_test.go @@ -105,6 +105,7 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { b.WorkspaceClient().Workspace.WithImpl(MockWorkspaceService{}) artifact := &config.Artifact{ + Type: "whl", Files: []config.ArtifactFile{ { Source: whlPath, @@ -118,4 +119,5 @@ func TestUploadArtifactFileToCorrectRemotePath(t *testing.T) { err := uploadArtifact(context.Background(), artifact, b) require.NoError(t, err) require.Regexp(t, regexp.MustCompile("/Users/test@databricks.com/whatever/.internal/[a-z0-9]+/test.whl"), artifact.Files[0].RemotePath) + require.Regexp(t, regexp.MustCompile("/Workspace/Users/test@databricks.com/whatever/.internal/[a-z0-9]+/test.whl"), artifact.Files[0].Libraries[0].Whl) } diff --git a/bundle/config/artifact.go b/bundle/config/artifact.go index 1955e265d..d7048a02e 100644 --- 
a/bundle/config/artifact.go +++ b/bundle/config/artifact.go @@ -78,9 +78,13 @@ func (a *Artifact) NormalisePaths() { remotePath := path.Join(wsfsBase, f.RemotePath) for i := range f.Libraries { lib := f.Libraries[i] - switch a.Type { - case ArtifactPythonWheel: + if lib.Whl != "" { lib.Whl = remotePath + continue + } + if lib.Jar != "" { + lib.Jar = remotePath + continue } } diff --git a/bundle/libraries/libraries.go b/bundle/libraries/libraries.go index d9a257db8..8e2e504c5 100644 --- a/bundle/libraries/libraries.go +++ b/bundle/libraries/libraries.go @@ -78,6 +78,17 @@ func IsTaskWithLocalLibraries(task *jobs.Task) bool { return false } +func IsTaskWithWorkspaceLibraries(task *jobs.Task) bool { + for _, l := range task.Libraries { + path := libPath(&l) + if isWorkspacePath(path) { + return true + } + } + + return false +} + func isMissingRequiredLibraries(task *jobs.Task) bool { if task.Libraries != nil { return false diff --git a/bundle/python/transform.go b/bundle/python/transform.go index 3d744df9d..d8eb33f54 100644 --- a/bundle/python/transform.go +++ b/bundle/python/transform.go @@ -73,8 +73,11 @@ func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { for i := range tasks { task := &tasks[i] - // Keep only Python wheel tasks with local libraries referenced - if task.PythonWheelTask == nil || !libraries.IsTaskWithLocalLibraries(task) { + // Keep only Python wheel tasks with workspace libraries referenced. + // At this point of moment we don't have local paths in Libraries sections anymore + // Local paths have been replaced with the remote when the artifacts where uploaded + // in artifacts.UploadAll mutator. 
+ if task.PythonWheelTask == nil || !needsTrampoline(task) { continue } @@ -87,6 +90,10 @@ func (t *pythonTrampoline) GetTasks(b *bundle.Bundle) []mutator.TaskWithJobKey { return result } +func needsTrampoline(task *jobs.Task) bool { + return libraries.IsTaskWithWorkspaceLibraries(task) +} + func (t *pythonTrampoline) GetTemplateData(task *jobs.Task) (map[string]any, error) { params, err := t.generateParameters(task.PythonWheelTask) if err != nil { diff --git a/bundle/python/transform_test.go b/bundle/python/transform_test.go index 99d3129d8..a7448f234 100644 --- a/bundle/python/transform_test.go +++ b/bundle/python/transform_test.go @@ -84,7 +84,7 @@ func TestTransformFiltersWheelTasksOnly(t *testing.T) { TaskKey: "key1", PythonWheelTask: &jobs.PythonWheelTask{}, Libraries: []compute.Library{ - {Whl: "./dist/test.whl"}, + {Whl: "/Workspace/Users/test@test.com/bundle/dist/test.whl"}, }, }, { From b5d033d1542b3e8f235457b8e1ed8ddb2ab54555 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Fri, 8 Sep 2023 17:37:55 +0200 Subject: [PATCH 122/139] List available targets when incorrect target passed (#756) ## Changes List available targets when incorrect target passed ## Tests ``` andrew.nester@HFW9Y94129 wheel % databricks bundle validate -t incorrect Error: incorrect: no such target. 
Available targets: prod, development ``` --- bundle/config/mutator/select_target.go | 4 +++- bundle/tests/suggest_target_test.go | 17 +++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 bundle/tests/suggest_target_test.go diff --git a/bundle/config/mutator/select_target.go b/bundle/config/mutator/select_target.go index 3be1f2e1a..2ad431128 100644 --- a/bundle/config/mutator/select_target.go +++ b/bundle/config/mutator/select_target.go @@ -3,8 +3,10 @@ package mutator import ( "context" "fmt" + "strings" "github.com/databricks/cli/bundle" + "golang.org/x/exp/maps" ) type selectTarget struct { @@ -30,7 +32,7 @@ func (m *selectTarget) Apply(_ context.Context, b *bundle.Bundle) error { // Get specified target target, ok := b.Config.Targets[m.name] if !ok { - return fmt.Errorf("%s: no such target", m.name) + return fmt.Errorf("%s: no such target. Available targets: %s", m.name, strings.Join(maps.Keys(b.Config.Targets), ", ")) } // Merge specified target into root configuration structure. 
diff --git a/bundle/tests/suggest_target_test.go b/bundle/tests/suggest_target_test.go new file mode 100644 index 000000000..924d6a4e1 --- /dev/null +++ b/bundle/tests/suggest_target_test.go @@ -0,0 +1,17 @@ +package config_tests + +import ( + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/stretchr/testify/require" +) + +func TestSuggestTargetIfWrongPassed(t *testing.T) { + t.Setenv("BUNDLE_ROOT", filepath.Join("target_overrides", "workspace")) + _, _, err := internal.RequireErrorRun(t, "bundle", "validate", "-e", "incorrect") + require.ErrorContains(t, err, "Available targets:") + require.ErrorContains(t, err, "development") + require.ErrorContains(t, err, "staging") +} From d9a276b17de3b0771f231eab6de1c55be9d7725b Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Sat, 9 Sep 2023 23:55:43 +0200 Subject: [PATCH 123/139] Fix minor typos in default-python template (#754) Co-authored-by: Pieter Noordhuis --- .../template/{{.project_name}}/databricks.yml.tmpl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl index 48aef0ea3..7fbf4da4c 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/databricks.yml.tmpl @@ -7,10 +7,10 @@ include: - resources/*.yml targets: - # The 'dev' target, used development purposes. + # The 'dev' target, used for development purposes. # Whenever a developer deploys using 'dev', they get their own copy. dev: - # We use 'mode: development' to make everything deployed to this target gets a prefix + # We use 'mode: development' to make sure everything deployed to this target gets a prefix # like '[dev my_user_name]'. 
Setting this mode also disables any schedules and # automatic triggers for jobs and enables the 'development' mode for Delta Live Tables pipelines. mode: development From 9e56bed5935df915d5f8cb6c2b673a3ed7c3d462 Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Mon, 11 Sep 2023 09:36:44 +0200 Subject: [PATCH 124/139] Minor default template tweaks (#758) Minor template tweaks, mostly making the imports section for DLT notebooks a bit more elegant. Tested with DAB deployment + in-workspace UI. --- .../default-python/template/__preamble.tmpl | 2 +- .../template/{{.project_name}}/README.md.tmpl | 2 +- .../{{.project_name}}/fixtures/.gitkeep.tmpl | 2 +- .../{{.project_name}}_pipeline.yml.tmpl | 6 +++--- .../src/dlt_pipeline.ipynb.tmpl | 20 +++++-------------- .../{{.project_name}}/src/notebook.ipynb.tmpl | 2 +- 6 files changed, 12 insertions(+), 22 deletions(-) diff --git a/libs/template/templates/default-python/template/__preamble.tmpl b/libs/template/templates/default-python/template/__preamble.tmpl index 95c613332..a86d3bffd 100644 --- a/libs/template/templates/default-python/template/__preamble.tmpl +++ b/libs/template/templates/default-python/template/__preamble.tmpl @@ -17,7 +17,7 @@ This file only template directives; it is skipped for the actual output. 
{{if $notPython}} {{skip "{{.project_name}}/src/{{.project_name}}"}} - {{skip "{{.project_name}}/tests/test_main.py"}} + {{skip "{{.project_name}}/tests/main_test.py"}} {{skip "{{.project_name}}/setup.py"}} {{skip "{{.project_name}}/pytest.ini"}} {{end}} diff --git a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl index 7c8876e75..1bcd7af41 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/README.md.tmpl @@ -20,7 +20,7 @@ The '{{.project_name}}' project was generated by using the default-python templa This deploys everything that's defined for this project. For example, the default template would deploy a job called - `[dev yourname] {{.project_name}}-job` to your workspace. + `[dev yourname] {{.project_name}}_job` to your workspace. You can find that job by opening your workpace and clicking on **Workflows**. 4. 
Similarly, to deploy a production copy, type: diff --git a/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl index 361c681f9..ee9570302 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/fixtures/.gitkeep.tmpl @@ -17,7 +17,7 @@ def get_absolute_path(*relative_parts): if 'dbutils' in globals(): base_dir = os.path.dirname(dbutils.notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()) # type: ignore path = os.path.normpath(os.path.join(base_dir, *relative_parts)) - return path if path.startswith("/Workspace") else os.path.join("/Workspace", path) + return path if path.startswith("/Workspace") else "/Workspace" + path else: return os.path.join(*relative_parts) diff --git a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl index ffe400cb8..4b8f74d17 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/resources/{{.project_name}}_pipeline.yml.tmpl @@ -2,11 +2,11 @@ resources: pipelines: {{.project_name}}_pipeline: - name: "{{.project_name}}_pipeline" - target: "{{.project_name}}_${bundle.environment}" + name: {{.project_name}}_pipeline + target: {{.project_name}}_${bundle.environment} libraries: - notebook: path: ../src/dlt_pipeline.ipynb configuration: - "bundle.sourcePath": "/Workspace/${workspace.file_path}/src" + bundle.sourcePath: /Workspace/${workspace.file_path}/src diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl 
b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl index 74893238e..4f50294f6 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/src/dlt_pipeline.ipynb.tmpl @@ -14,7 +14,7 @@ "source": [ "# DLT pipeline\n", "\n", - "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.my_project}}_pipeline.yml." + "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/{{.project_name}}_pipeline.yml." ] }, { @@ -27,28 +27,18 @@ "nuid": "9198e987-5606-403d-9f6d-8f14e6a4017f", "showTitle": false, "title": "" - }, - "jupyter": { - {{- /* Collapse this cell by default. Just boring imports here! */}} - "source_hidden": true } }, "outputs": [], "source": [ {{- if (eq .include_python "yes") }} - "# Import DLT and make sure 'my_project' is on the Python path\n", + "# Import DLT and src/{{.project_name}}\n", "import dlt\n", - "from pyspark.sql.functions import expr\n", - "from pyspark.sql import SparkSession\n", - "spark = SparkSession.builder.getOrCreate()\n", "import sys\n", - "try:\n", - " sys.path.append(spark.conf.get(\"bundle.sourcePath\"))\n", - "except:\n", - " pass\n", - "from my_project import main" + "sys.path.append(spark.conf.get(\"bundle.sourcePath\", \".\"))\n", + "from pyspark.sql.functions import expr\n", + "from {{.project_name}} import main" {{else}} - "# Import DLT\n", "import dlt\n", "from pyspark.sql.functions import expr\n", "from pyspark.sql import SparkSession\n", diff --git a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl index 8423ecf8b..0ab61db2c 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl +++ 
b/libs/template/templates/default-python/template/{{.project_name}}/src/notebook.ipynb.tmpl @@ -14,7 +14,7 @@ "source": [ "# Default notebook\n", "\n", - "This default notebook is executed using Databricks Workflows as defined in resources/{{.my_project}}_job.yml." + "This default notebook is executed using Databricks Workflows as defined in resources/{{.project_name}}_job.yml." ] }, { From 9a51f72f0b86d7fc57c35392cbeba4c5ccb15650 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 11 Sep 2023 10:16:22 +0200 Subject: [PATCH 125/139] Make bundle and sync fields optional (#757) ## Changes This PR: 1. Makes the bundle and sync properties optional in the generated schema. 2. Fixes schema generation that was broken due to a rogue "description" field in the bundle docs. ## Tests Tested manually. The generated schema no longer has "bundle" and "sync" marked as required. --- bundle/config/root.go | 4 ++-- bundle/schema/docs/bundle_descriptions.json | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/bundle/config/root.go b/bundle/config/root.go index 99ea33ad6..0377f60a0 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -52,7 +52,7 @@ type Root struct { // Bundle contains details about this bundle, such as its name, // version of the spec (TODO), default cluster, default warehouse, etc. - Bundle Bundle `json:"bundle"` + Bundle Bundle `json:"bundle,omitempty"` // Include specifies a list of patterns of file names to load and // merge into the this configuration. 
Only includes defined in the root @@ -80,7 +80,7 @@ type Root struct { Environments map[string]*Target `json:"environments,omitempty"` // Sync section specifies options for files synchronization - Sync Sync `json:"sync"` + Sync Sync `json:"sync,omitempty"` // RunAs section allows to define an execution identity for jobs and pipelines runs RunAs *jobs.JobRunAs `json:"run_as,omitempty"` diff --git a/bundle/schema/docs/bundle_descriptions.json b/bundle/schema/docs/bundle_descriptions.json index ffdb56298..98f3cf8d0 100644 --- a/bundle/schema/docs/bundle_descriptions.json +++ b/bundle/schema/docs/bundle_descriptions.json @@ -1472,7 +1472,6 @@ "config": { "description": "The model serving endpoint configuration.", "properties": { - "description": "", "properties": { "served_models": { "description": "Each block represents a served model for the endpoint to serve. A model serving endpoint can have up to 10 served models.", From 4ccc70aeaca336ceac0aebb924e428dac38eb84f Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 11 Sep 2023 10:18:43 +0200 Subject: [PATCH 126/139] Consolidate environment variable interaction (#747) ## Changes There are a couple places throughout the code base where interaction with environment variables takes place. Moreover, more than one of these would try to read a value from more than one environment variable as fallback (for backwards compatibility). This change consolidates those accesses. The majority of diffs in this change are mechanical (i.e. add an argument or replace a call). This change: * Moves common environment variable lookups for bundles to `bundles/env`. * Adds a `libs/env` package that wraps `os.LookupEnv` and `os.Getenv` and allows for overrides to take place in a `context.Context`. By scoping overrides to a `context.Context` we can avoid `t.Setenv` in testing and unlock parallel test execution for integration tests. * Updates call sites to pass through a `context.Context` where needed. 
* For bundles, introduces `DATABRICKS_BUNDLE_ROOT` as new primary variable instead of `BUNDLE_ROOT`. This was the last environment variable that did not use the `DATABRICKS_` prefix. ## Tests Unit tests pass. --- bundle/bundle.go | 26 ++++---- bundle/bundle_test.go | 19 +++--- bundle/config/mutator/override_compute.go | 6 +- .../config/mutator/process_root_includes.go | 7 ++- .../mutator/process_root_includes_test.go | 16 +++-- bundle/config/mutator/set_variables.go | 8 +-- bundle/config/mutator/set_variables_test.go | 10 +-- bundle/config/mutator/trampoline.go | 6 +- bundle/config/mutator/trampoline_test.go | 2 +- bundle/deploy/files/sync.go | 4 +- bundle/deploy/terraform/dir.go | 6 +- bundle/deploy/terraform/init.go | 59 ++++++++--------- bundle/deploy/terraform/init_test.go | 22 +++---- bundle/deploy/terraform/plan.go | 2 +- bundle/deploy/terraform/state_pull.go | 2 +- bundle/deploy/terraform/state_push.go | 2 +- bundle/deploy/terraform/write.go | 2 +- bundle/env/env.go | 18 ++++++ bundle/env/env_test.go | 44 +++++++++++++ bundle/env/includes.go | 14 +++++ bundle/env/includes_test.go | 28 +++++++++ bundle/env/root.go | 16 +++++ bundle/env/root_test.go | 43 +++++++++++++ bundle/env/target.go | 17 +++++ bundle/env/target_test.go | 43 +++++++++++++ bundle/env/temp_dir.go | 13 ++++ bundle/env/temp_dir_test.go | 28 +++++++++ bundle/root.go | 20 +++--- bundle/root_test.go | 59 ++++++++++------- cmd/bundle/sync.go | 4 +- cmd/cmd.go | 5 +- cmd/configure/configure_test.go | 6 +- cmd/root/bundle.go | 16 ++--- cmd/root/io.go | 5 +- cmd/root/logger.go | 8 +-- cmd/root/progress_logger.go | 3 +- cmd/root/root.go | 6 +- cmd/root/user_agent_upstream.go | 6 +- cmd/sync/sync.go | 4 +- internal/helpers.go | 2 +- internal/testutil/env.go | 33 ++++++++++ libs/env/context.go | 63 +++++++++++++++++++ libs/env/context_test.go | 41 ++++++++++++ libs/env/pkg.go | 7 +++ main.go | 4 +- main_test.go | 3 +- 46 files changed, 594 insertions(+), 164 deletions(-) create mode 100644 
bundle/env/env.go create mode 100644 bundle/env/env_test.go create mode 100644 bundle/env/includes.go create mode 100644 bundle/env/includes_test.go create mode 100644 bundle/env/root.go create mode 100644 bundle/env/root_test.go create mode 100644 bundle/env/target.go create mode 100644 bundle/env/target_test.go create mode 100644 bundle/env/temp_dir.go create mode 100644 bundle/env/temp_dir_test.go create mode 100644 internal/testutil/env.go create mode 100644 libs/env/context.go create mode 100644 libs/env/context_test.go create mode 100644 libs/env/pkg.go diff --git a/bundle/bundle.go b/bundle/bundle.go index 8175ce283..4fc605398 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -14,6 +14,7 @@ import ( "sync" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/folders" "github.com/databricks/cli/libs/git" "github.com/databricks/cli/libs/locker" @@ -51,8 +52,6 @@ type Bundle struct { AutoApprove bool } -const ExtraIncludePathsKey string = "DATABRICKS_BUNDLE_INCLUDES" - func Load(ctx context.Context, path string) (*Bundle, error) { bundle := &Bundle{} stat, err := os.Stat(path) @@ -61,9 +60,9 @@ func Load(ctx context.Context, path string) (*Bundle, error) { } configFile, err := config.FileNames.FindInPath(path) if err != nil { - _, hasIncludePathEnv := os.LookupEnv(ExtraIncludePathsKey) - _, hasBundleRootEnv := os.LookupEnv(envBundleRoot) - if hasIncludePathEnv && hasBundleRootEnv && stat.IsDir() { + _, hasRootEnv := env.Root(ctx) + _, hasIncludesEnv := env.Includes(ctx) + if hasRootEnv && hasIncludesEnv && stat.IsDir() { log.Debugf(ctx, "No bundle configuration; using bundle root: %s", path) bundle.Config = config.Root{ Path: path, @@ -86,7 +85,7 @@ func Load(ctx context.Context, path string) (*Bundle, error) { // MustLoad returns a bundle configuration. // It returns an error if a bundle was not found or could not be loaded. 
func MustLoad(ctx context.Context) (*Bundle, error) { - root, err := mustGetRoot() + root, err := mustGetRoot(ctx) if err != nil { return nil, err } @@ -98,7 +97,7 @@ func MustLoad(ctx context.Context) (*Bundle, error) { // It returns an error if a bundle was found but could not be loaded. // It returns a `nil` bundle if a bundle was not found. func TryLoad(ctx context.Context) (*Bundle, error) { - root, err := tryGetRoot() + root, err := tryGetRoot(ctx) if err != nil { return nil, err } @@ -124,13 +123,12 @@ func (b *Bundle) WorkspaceClient() *databricks.WorkspaceClient { // CacheDir returns directory to use for temporary files for this bundle. // Scoped to the bundle's target. -func (b *Bundle) CacheDir(paths ...string) (string, error) { +func (b *Bundle) CacheDir(ctx context.Context, paths ...string) (string, error) { if b.Config.Bundle.Target == "" { panic("target not set") } - cacheDirName, exists := os.LookupEnv("DATABRICKS_BUNDLE_TMP") - + cacheDirName, exists := env.TempDir(ctx) if !exists || cacheDirName == "" { cacheDirName = filepath.Join( // Anchor at bundle root directory. @@ -163,8 +161,8 @@ func (b *Bundle) CacheDir(paths ...string) (string, error) { // This directory is used to store and automaticaly sync internal bundle files, such as, f.e // notebook trampoline files for Python wheel and etc. 
-func (b *Bundle) InternalDir() (string, error) { - cacheDir, err := b.CacheDir() +func (b *Bundle) InternalDir(ctx context.Context) (string, error) { + cacheDir, err := b.CacheDir(ctx) if err != nil { return "", err } @@ -181,8 +179,8 @@ func (b *Bundle) InternalDir() (string, error) { // GetSyncIncludePatterns returns a list of user defined includes // And also adds InternalDir folder to include list for sync command // so this folder is always synced -func (b *Bundle) GetSyncIncludePatterns() ([]string, error) { - internalDir, err := b.InternalDir() +func (b *Bundle) GetSyncIncludePatterns(ctx context.Context) ([]string, error) { + internalDir, err := b.InternalDir(ctx) if err != nil { return nil, err } diff --git a/bundle/bundle_test.go b/bundle/bundle_test.go index 4a3e7f2c9..43477efd1 100644 --- a/bundle/bundle_test.go +++ b/bundle/bundle_test.go @@ -6,6 +6,7 @@ import ( "path/filepath" "testing" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -23,12 +24,13 @@ func TestLoadExists(t *testing.T) { } func TestBundleCacheDir(t *testing.T) { + ctx := context.Background() projectDir := t.TempDir() f1, err := os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() - bundle, err := Load(context.Background(), projectDir) + bundle, err := Load(ctx, projectDir) require.NoError(t, err) // Artificially set target. 
@@ -38,7 +40,7 @@ func TestBundleCacheDir(t *testing.T) { // unset env variable in case it's set t.Setenv("DATABRICKS_BUNDLE_TMP", "") - cacheDir, err := bundle.CacheDir() + cacheDir, err := bundle.CacheDir(ctx) // format is /.databricks/bundle/ assert.NoError(t, err) @@ -46,13 +48,14 @@ func TestBundleCacheDir(t *testing.T) { } func TestBundleCacheDirOverride(t *testing.T) { + ctx := context.Background() projectDir := t.TempDir() bundleTmpDir := t.TempDir() f1, err := os.Create(filepath.Join(projectDir, "databricks.yml")) require.NoError(t, err) f1.Close() - bundle, err := Load(context.Background(), projectDir) + bundle, err := Load(ctx, projectDir) require.NoError(t, err) // Artificially set target. @@ -62,7 +65,7 @@ func TestBundleCacheDirOverride(t *testing.T) { // now we expect to use 'bundleTmpDir' instead of CWD/.databricks/bundle t.Setenv("DATABRICKS_BUNDLE_TMP", bundleTmpDir) - cacheDir, err := bundle.CacheDir() + cacheDir, err := bundle.CacheDir(ctx) // format is / assert.NoError(t, err) @@ -70,14 +73,14 @@ func TestBundleCacheDirOverride(t *testing.T) { } func TestBundleMustLoadSuccess(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/basic") + t.Setenv(env.RootVariable, "./tests/basic") b, err := MustLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleMustLoadFailureWithEnv(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/doesntexist") + t.Setenv(env.RootVariable, "./tests/doesntexist") _, err := MustLoad(context.Background()) require.Error(t, err, "not a directory") } @@ -89,14 +92,14 @@ func TestBundleMustLoadFailureIfNotFound(t *testing.T) { } func TestBundleTryLoadSuccess(t *testing.T) { - t.Setenv(envBundleRoot, "./tests/basic") + t.Setenv(env.RootVariable, "./tests/basic") b, err := TryLoad(context.Background()) require.NoError(t, err) assert.Equal(t, "tests/basic", filepath.ToSlash(b.Config.Path)) } func TestBundleTryLoadFailureWithEnv(t *testing.T) { - 
t.Setenv(envBundleRoot, "./tests/doesntexist") + t.Setenv(env.RootVariable, "./tests/doesntexist") _, err := TryLoad(context.Background()) require.Error(t, err, "not a directory") } diff --git a/bundle/config/mutator/override_compute.go b/bundle/config/mutator/override_compute.go index ee2e2a825..21d950135 100644 --- a/bundle/config/mutator/override_compute.go +++ b/bundle/config/mutator/override_compute.go @@ -3,11 +3,11 @@ package mutator import ( "context" "fmt" - "os" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/resources" + "github.com/databricks/cli/libs/env" ) type overrideCompute struct{} @@ -39,8 +39,8 @@ func (m *overrideCompute) Apply(ctx context.Context, b *bundle.Bundle) error { } return nil } - if os.Getenv("DATABRICKS_CLUSTER_ID") != "" { - b.Config.Bundle.ComputeID = os.Getenv("DATABRICKS_CLUSTER_ID") + if v := env.Get(ctx, "DATABRICKS_CLUSTER_ID"); v != "" { + b.Config.Bundle.ComputeID = v } if b.Config.Bundle.ComputeID == "" { diff --git a/bundle/config/mutator/process_root_includes.go b/bundle/config/mutator/process_root_includes.go index 989928721..5a5ab1b19 100644 --- a/bundle/config/mutator/process_root_includes.go +++ b/bundle/config/mutator/process_root_includes.go @@ -10,11 +10,12 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/env" ) // Get extra include paths from environment variable -func GetExtraIncludePaths() []string { - value, exists := os.LookupEnv(bundle.ExtraIncludePathsKey) +func getExtraIncludePaths(ctx context.Context) []string { + value, exists := env.Includes(ctx) if !exists { return nil } @@ -48,7 +49,7 @@ func (m *processRootIncludes) Apply(ctx context.Context, b *bundle.Bundle) error var files []string // Converts extra include paths from environment variable to relative paths - for _, extraIncludePath := range GetExtraIncludePaths() { + for _, extraIncludePath := range 
getExtraIncludePaths(ctx) { if filepath.IsAbs(extraIncludePath) { rel, err := filepath.Rel(b.Config.Path, extraIncludePath) if err != nil { diff --git a/bundle/config/mutator/process_root_includes_test.go b/bundle/config/mutator/process_root_includes_test.go index 1ce094bc3..aec9b32df 100644 --- a/bundle/config/mutator/process_root_includes_test.go +++ b/bundle/config/mutator/process_root_includes_test.go @@ -2,16 +2,17 @@ package mutator_test import ( "context" - "fmt" "os" "path" "path/filepath" "runtime" + "strings" "testing" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -129,10 +130,7 @@ func TestProcessRootIncludesExtrasFromEnvVar(t *testing.T) { rootPath := t.TempDir() testYamlName := "extra_include_path.yml" touch(t, rootPath, testYamlName) - os.Setenv(bundle.ExtraIncludePathsKey, path.Join(rootPath, testYamlName)) - t.Cleanup(func() { - os.Unsetenv(bundle.ExtraIncludePathsKey) - }) + t.Setenv(env.IncludesVariable, path.Join(rootPath, testYamlName)) bundle := &bundle.Bundle{ Config: config.Root{ @@ -149,7 +147,13 @@ func TestProcessRootIncludesDedupExtrasFromEnvVar(t *testing.T) { rootPath := t.TempDir() testYamlName := "extra_include_path.yml" touch(t, rootPath, testYamlName) - t.Setenv(bundle.ExtraIncludePathsKey, fmt.Sprintf("%s%s%s", path.Join(rootPath, testYamlName), string(os.PathListSeparator), path.Join(rootPath, testYamlName))) + t.Setenv(env.IncludesVariable, strings.Join( + []string{ + path.Join(rootPath, testYamlName), + path.Join(rootPath, testYamlName), + }, + string(os.PathListSeparator), + )) bundle := &bundle.Bundle{ Config: config.Root{ diff --git a/bundle/config/mutator/set_variables.go b/bundle/config/mutator/set_variables.go index 427b6dce4..4bf8ff82a 100644 --- a/bundle/config/mutator/set_variables.go +++ 
b/bundle/config/mutator/set_variables.go @@ -3,10 +3,10 @@ package mutator import ( "context" "fmt" - "os" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/variable" + "github.com/databricks/cli/libs/env" ) const bundleVarPrefix = "BUNDLE_VAR_" @@ -21,7 +21,7 @@ func (m *setVariables) Name() string { return "SetVariables" } -func setVariable(v *variable.Variable, name string) error { +func setVariable(ctx context.Context, v *variable.Variable, name string) error { // case: variable already has value initialized, so skip if v.HasValue() { return nil @@ -29,7 +29,7 @@ func setVariable(v *variable.Variable, name string) error { // case: read and set variable value from process environment envVarName := bundleVarPrefix + name - if val, ok := os.LookupEnv(envVarName); ok { + if val, ok := env.Lookup(ctx, envVarName); ok { err := v.Set(val) if err != nil { return fmt.Errorf(`failed to assign value "%s" to variable %s from environment variable %s with error: %w`, val, name, envVarName, err) @@ -54,7 +54,7 @@ func setVariable(v *variable.Variable, name string) error { func (m *setVariables) Apply(ctx context.Context, b *bundle.Bundle) error { for name, variable := range b.Config.Variables { - err := setVariable(variable, name) + err := setVariable(ctx, variable, name) if err != nil { return err } diff --git a/bundle/config/mutator/set_variables_test.go b/bundle/config/mutator/set_variables_test.go index 91948aa4b..323f1e864 100644 --- a/bundle/config/mutator/set_variables_test.go +++ b/bundle/config/mutator/set_variables_test.go @@ -21,7 +21,7 @@ func TestSetVariableFromProcessEnvVar(t *testing.T) { // set value for variable as an environment variable t.Setenv("BUNDLE_VAR_foo", "process-env") - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "process-env") } @@ -33,7 +33,7 @@ func TestSetVariableUsingDefaultValue(t *testing.T) { Default: 
&defaultVal, } - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "default") } @@ -49,7 +49,7 @@ func TestSetVariableWhenAlreadyAValueIsAssigned(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the default value - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "assigned-value") } @@ -68,7 +68,7 @@ func TestSetVariableEnvVarValueDoesNotOverridePresetValue(t *testing.T) { // since a value is already assigned to the variable, it would not be overridden // by the value from environment - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") require.NoError(t, err) assert.Equal(t, *variable.Value, "assigned-value") } @@ -79,7 +79,7 @@ func TestSetVariablesErrorsIfAValueCouldNotBeResolved(t *testing.T) { } // fails because we could not resolve a value for the variable - err := setVariable(&variable, "foo") + err := setVariable(context.Background(), &variable, "foo") assert.ErrorContains(t, err, "no value assigned to required variable foo. 
Assignment can be done through the \"--var\" flag or by setting the BUNDLE_VAR_foo environment variable") } diff --git a/bundle/config/mutator/trampoline.go b/bundle/config/mutator/trampoline.go index 7c06c7fa6..52d62c1ba 100644 --- a/bundle/config/mutator/trampoline.go +++ b/bundle/config/mutator/trampoline.go @@ -43,7 +43,7 @@ func (m *trampoline) Name() string { func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { tasks := m.functions.GetTasks(b) for _, task := range tasks { - err := m.generateNotebookWrapper(b, task) + err := m.generateNotebookWrapper(ctx, b, task) if err != nil { return err } @@ -51,8 +51,8 @@ func (m *trampoline) Apply(ctx context.Context, b *bundle.Bundle) error { return nil } -func (m *trampoline) generateNotebookWrapper(b *bundle.Bundle, task TaskWithJobKey) error { - internalDir, err := b.InternalDir() +func (m *trampoline) generateNotebookWrapper(ctx context.Context, b *bundle.Bundle, task TaskWithJobKey) error { + internalDir, err := b.InternalDir(ctx) if err != nil { return err } diff --git a/bundle/config/mutator/trampoline_test.go b/bundle/config/mutator/trampoline_test.go index aec58618c..a3e06b303 100644 --- a/bundle/config/mutator/trampoline_test.go +++ b/bundle/config/mutator/trampoline_test.go @@ -83,7 +83,7 @@ func TestGenerateTrampoline(t *testing.T) { err := bundle.Apply(ctx, b, trampoline) require.NoError(t, err) - dir, err := b.InternalDir() + dir, err := b.InternalDir(ctx) require.NoError(t, err) filename := filepath.Join(dir, "notebook_test_to_trampoline.py") diff --git a/bundle/deploy/files/sync.go b/bundle/deploy/files/sync.go index 2dccd20a7..ff3d78d07 100644 --- a/bundle/deploy/files/sync.go +++ b/bundle/deploy/files/sync.go @@ -9,12 +9,12 @@ import ( ) func getSync(ctx context.Context, b *bundle.Bundle) (*sync.Sync, error) { - cacheDir, err := b.CacheDir() + cacheDir, err := b.CacheDir(ctx) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } - includes, err := 
b.GetSyncIncludePatterns() + includes, err := b.GetSyncIncludePatterns(ctx) if err != nil { return nil, fmt.Errorf("cannot get list of sync includes: %w", err) } diff --git a/bundle/deploy/terraform/dir.go b/bundle/deploy/terraform/dir.go index 9f83b8da5..b7b086ceb 100644 --- a/bundle/deploy/terraform/dir.go +++ b/bundle/deploy/terraform/dir.go @@ -1,11 +1,13 @@ package terraform import ( + "context" + "github.com/databricks/cli/bundle" ) // Dir returns the Terraform working directory for a given bundle. // The working directory is emphemeral and nested under the bundle's cache directory. -func Dir(b *bundle.Bundle) (string, error) { - return b.CacheDir("terraform") +func Dir(ctx context.Context, b *bundle.Bundle) (string, error) { + return b.CacheDir(ctx, "terraform") } diff --git a/bundle/deploy/terraform/init.go b/bundle/deploy/terraform/init.go index 60f0a6c4f..aa1dff74e 100644 --- a/bundle/deploy/terraform/init.go +++ b/bundle/deploy/terraform/init.go @@ -12,6 +12,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/log" "github.com/hashicorp/go-version" "github.com/hashicorp/hc-install/product" @@ -38,7 +39,7 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con return tf.ExecPath, nil } - binDir, err := b.CacheDir("bin") + binDir, err := b.CacheDir(context.Background(), "bin") if err != nil { return "", err } @@ -73,25 +74,25 @@ func (m *initialize) findExecPath(ctx context.Context, b *bundle.Bundle, tf *con } // This function inherits some environment variables for Terraform CLI. -func inheritEnvVars(env map[string]string) error { +func inheritEnvVars(ctx context.Context, environ map[string]string) error { // Include $HOME in set of environment variables to pass along. 
- home, ok := os.LookupEnv("HOME") + home, ok := env.Lookup(ctx, "HOME") if ok { - env["HOME"] = home + environ["HOME"] = home } // Include $PATH in set of environment variables to pass along. // This is necessary to ensure that our Terraform provider can use the // same auxiliary programs (e.g. `az`, or `gcloud`) as the CLI. - path, ok := os.LookupEnv("PATH") + path, ok := env.Lookup(ctx, "PATH") if ok { - env["PATH"] = path + environ["PATH"] = path } // Include $TF_CLI_CONFIG_FILE to override terraform provider in development. - configFile, ok := os.LookupEnv("TF_CLI_CONFIG_FILE") + configFile, ok := env.Lookup(ctx, "TF_CLI_CONFIG_FILE") if ok { - env["TF_CLI_CONFIG_FILE"] = configFile + environ["TF_CLI_CONFIG_FILE"] = configFile } return nil @@ -105,40 +106,40 @@ func inheritEnvVars(env map[string]string) error { // the CLI and its dependencies do not have access to. // // see: os.TempDir for more context -func setTempDirEnvVars(env map[string]string, b *bundle.Bundle) error { +func setTempDirEnvVars(ctx context.Context, environ map[string]string, b *bundle.Bundle) error { switch runtime.GOOS { case "windows": - if v, ok := os.LookupEnv("TMP"); ok { - env["TMP"] = v - } else if v, ok := os.LookupEnv("TEMP"); ok { - env["TEMP"] = v - } else if v, ok := os.LookupEnv("USERPROFILE"); ok { - env["USERPROFILE"] = v + if v, ok := env.Lookup(ctx, "TMP"); ok { + environ["TMP"] = v + } else if v, ok := env.Lookup(ctx, "TEMP"); ok { + environ["TEMP"] = v + } else if v, ok := env.Lookup(ctx, "USERPROFILE"); ok { + environ["USERPROFILE"] = v } else { - tmpDir, err := b.CacheDir("tmp") + tmpDir, err := b.CacheDir(ctx, "tmp") if err != nil { return err } - env["TMP"] = tmpDir + environ["TMP"] = tmpDir } default: // If TMPDIR is not set, we let the process fall back to its default value. 
- if v, ok := os.LookupEnv("TMPDIR"); ok { - env["TMPDIR"] = v + if v, ok := env.Lookup(ctx, "TMPDIR"); ok { + environ["TMPDIR"] = v } } return nil } // This function passes through all proxy related environment variables. -func setProxyEnvVars(env map[string]string, b *bundle.Bundle) error { +func setProxyEnvVars(ctx context.Context, environ map[string]string, b *bundle.Bundle) error { for _, v := range []string{"http_proxy", "https_proxy", "no_proxy"} { // The case (upper or lower) is notoriously inconsistent for tools on Unix systems. // We therefore try to read both the upper and lower case versions of the variable. for _, v := range []string{strings.ToUpper(v), strings.ToLower(v)} { - if val, ok := os.LookupEnv(v); ok { + if val, ok := env.Lookup(ctx, v); ok { // Only set uppercase version of the variable. - env[strings.ToUpper(v)] = val + environ[strings.ToUpper(v)] = val } } } @@ -157,7 +158,7 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - workingDir, err := Dir(b) + workingDir, err := Dir(ctx, b) if err != nil { return err } @@ -167,31 +168,31 @@ func (m *initialize) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - env, err := b.AuthEnv() + environ, err := b.AuthEnv() if err != nil { return err } - err = inheritEnvVars(env) + err = inheritEnvVars(ctx, environ) if err != nil { return err } // Set the temporary directory environment variables - err = setTempDirEnvVars(env, b) + err = setTempDirEnvVars(ctx, environ, b) if err != nil { return err } // Set the proxy related environment variables - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(ctx, environ, b) if err != nil { return err } // Configure environment variables for auth for Terraform to use. 
- log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(env), ", ")) - err = tf.SetEnv(env) + log.Debugf(ctx, "Environment variables for Terraform: %s", strings.Join(maps.Keys(environ), ", ")) + err = tf.SetEnv(environ) if err != nil { return err } diff --git a/bundle/deploy/terraform/init_test.go b/bundle/deploy/terraform/init_test.go index b94593878..001e7a220 100644 --- a/bundle/deploy/terraform/init_test.go +++ b/bundle/deploy/terraform/init_test.go @@ -68,7 +68,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // Assert that we pass through TMPDIR. @@ -96,7 +96,7 @@ func TestSetTempDirEnvVarsForUnixWithTmpDirNotSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // Assert that we don't pass through TMPDIR. 
@@ -124,7 +124,7 @@ func TestSetTempDirEnvVarsForWindowWithAllTmpDirEnvVarsSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the highest priority env var value @@ -154,7 +154,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileAndTempSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the highest priority env var value @@ -184,7 +184,7 @@ func TestSetTempDirEnvVarsForWindowWithUserProfileSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert that we pass through the user profile @@ -214,11 +214,11 @@ func TestSetTempDirEnvVarsForWindowsWithoutAnyTempDirEnvVarsSet(t *testing.T) { // compute env env := make(map[string]string, 0) - err := setTempDirEnvVars(env, b) + err := setTempDirEnvVars(context.Background(), env, b) require.NoError(t, err) // assert TMP is set to b.CacheDir("tmp") - tmpDir, err := b.CacheDir("tmp") + tmpDir, err := b.CacheDir(context.Background(), "tmp") require.NoError(t, err) assert.Equal(t, map[string]string{ "TMP": tmpDir, @@ -248,7 +248,7 @@ func TestSetProxyEnvVars(t *testing.T) { // No proxy env vars set. 
clearEnv() env := make(map[string]string, 0) - err := setProxyEnvVars(env, b) + err := setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.Len(t, env, 0) @@ -258,7 +258,7 @@ func TestSetProxyEnvVars(t *testing.T) { t.Setenv("https_proxy", "foo") t.Setenv("no_proxy", "foo") env = make(map[string]string, 0) - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env)) @@ -268,7 +268,7 @@ func TestSetProxyEnvVars(t *testing.T) { t.Setenv("HTTPS_PROXY", "foo") t.Setenv("NO_PROXY", "foo") env = make(map[string]string, 0) - err = setProxyEnvVars(env, b) + err = setProxyEnvVars(context.Background(), env, b) require.NoError(t, err) assert.ElementsMatch(t, []string{"HTTP_PROXY", "HTTPS_PROXY", "NO_PROXY"}, maps.Keys(env)) } @@ -280,7 +280,7 @@ func TestInheritEnvVars(t *testing.T) { t.Setenv("PATH", "/foo:/bar") t.Setenv("TF_CLI_CONFIG_FILE", "/tmp/config.tfrc") - err := inheritEnvVars(env) + err := inheritEnvVars(context.Background(), env) require.NoError(t, err) diff --git a/bundle/deploy/terraform/plan.go b/bundle/deploy/terraform/plan.go index a725b4aa9..ff841148c 100644 --- a/bundle/deploy/terraform/plan.go +++ b/bundle/deploy/terraform/plan.go @@ -40,7 +40,7 @@ func (p *plan) Apply(ctx context.Context, b *bundle.Bundle) error { } // Persist computed plan - tfDir, err := Dir(b) + tfDir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/deploy/terraform/state_pull.go b/bundle/deploy/terraform/state_pull.go index e5a42d89b..6dd12ccfc 100644 --- a/bundle/deploy/terraform/state_pull.go +++ b/bundle/deploy/terraform/state_pull.go @@ -25,7 +25,7 @@ func (l *statePull) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/deploy/terraform/state_push.go b/bundle/deploy/terraform/state_push.go 
index 0cd69e522..ae1d8b8b3 100644 --- a/bundle/deploy/terraform/state_push.go +++ b/bundle/deploy/terraform/state_push.go @@ -22,7 +22,7 @@ func (l *statePush) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/deploy/terraform/write.go b/bundle/deploy/terraform/write.go index 0bf9ab24a..eca79ad21 100644 --- a/bundle/deploy/terraform/write.go +++ b/bundle/deploy/terraform/write.go @@ -16,7 +16,7 @@ func (w *write) Name() string { } func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { - dir, err := Dir(b) + dir, err := Dir(ctx, b) if err != nil { return err } diff --git a/bundle/env/env.go b/bundle/env/env.go new file mode 100644 index 000000000..ed2a13c75 --- /dev/null +++ b/bundle/env/env.go @@ -0,0 +1,18 @@ +package env + +import ( + "context" + + envlib "github.com/databricks/cli/libs/env" +) + +// Return the value of the first environment variable that is set. +func get(ctx context.Context, variables []string) (string, bool) { + for _, v := range variables { + value, ok := envlib.Lookup(ctx, v) + if ok { + return value, true + } + } + return "", false +} diff --git a/bundle/env/env_test.go b/bundle/env/env_test.go new file mode 100644 index 000000000..d900242e0 --- /dev/null +++ b/bundle/env/env_test.go @@ -0,0 +1,44 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestGetWithRealEnvSingleVariable(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("v1", "foo") + + v, ok := get(context.Background(), []string{"v1"}) + require.True(t, ok) + assert.Equal(t, "foo", v) + + // Not set. 
+ v, ok = get(context.Background(), []string{"v2"}) + require.False(t, ok) + assert.Equal(t, "", v) +} + +func TestGetWithRealEnvMultipleVariables(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("v1", "foo") + + for _, vars := range [][]string{ + {"v1", "v2", "v3"}, + {"v2", "v3", "v1"}, + {"v3", "v1", "v2"}, + } { + v, ok := get(context.Background(), vars) + require.True(t, ok) + assert.Equal(t, "foo", v) + } + + // Not set. + v, ok := get(context.Background(), []string{"v2", "v3", "v4"}) + require.False(t, ok) + assert.Equal(t, "", v) +} diff --git a/bundle/env/includes.go b/bundle/env/includes.go new file mode 100644 index 000000000..4ade01877 --- /dev/null +++ b/bundle/env/includes.go @@ -0,0 +1,14 @@ +package env + +import "context" + +// IncludesVariable names the environment variable that holds additional configuration paths to include +// during bundle configuration loading. Also see `bundle/config/mutator/process_root_includes.go`. +const IncludesVariable = "DATABRICKS_BUNDLE_INCLUDES" + +// Includes returns the bundle Includes environment variable. 
+func Includes(ctx context.Context) (string, bool) { + return get(ctx, []string{ + IncludesVariable, + }) +} diff --git a/bundle/env/includes_test.go b/bundle/env/includes_test.go new file mode 100644 index 000000000..d9366a59f --- /dev/null +++ b/bundle/env/includes_test.go @@ -0,0 +1,28 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestIncludes(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_INCLUDES", "foo") + includes, ok := Includes(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", includes) + }) + + t.Run("not set", func(t *testing.T) { + includes, ok := Includes(ctx) + assert.False(t, ok) + assert.Equal(t, "", includes) + }) +} diff --git a/bundle/env/root.go b/bundle/env/root.go new file mode 100644 index 000000000..e3c2a38ad --- /dev/null +++ b/bundle/env/root.go @@ -0,0 +1,16 @@ +package env + +import "context" + +// RootVariable names the environment variable that holds the bundle root path. +const RootVariable = "DATABRICKS_BUNDLE_ROOT" + +// Root returns the bundle root environment variable. +func Root(ctx context.Context) (string, bool) { + return get(ctx, []string{ + RootVariable, + + // Primary variable name for the bundle root until v0.204.0. 
+ "BUNDLE_ROOT", + }) +} diff --git a/bundle/env/root_test.go b/bundle/env/root_test.go new file mode 100644 index 000000000..fc2d6e206 --- /dev/null +++ b/bundle/env/root_test.go @@ -0,0 +1,43 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestRoot(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("first", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ROOT", "foo") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", root) + }) + + t.Run("second", func(t *testing.T) { + t.Setenv("BUNDLE_ROOT", "foo") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", root) + }) + + t.Run("both set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ROOT", "first") + t.Setenv("BUNDLE_ROOT", "second") + root, ok := Root(ctx) + assert.True(t, ok) + assert.Equal(t, "first", root) + }) + + t.Run("not set", func(t *testing.T) { + root, ok := Root(ctx) + assert.False(t, ok) + assert.Equal(t, "", root) + }) +} diff --git a/bundle/env/target.go b/bundle/env/target.go new file mode 100644 index 000000000..ac3b48877 --- /dev/null +++ b/bundle/env/target.go @@ -0,0 +1,17 @@ +package env + +import "context" + +// TargetVariable names the environment variable that holds the bundle target to use. +const TargetVariable = "DATABRICKS_BUNDLE_TARGET" + +// Target returns the bundle target environment variable. +func Target(ctx context.Context) (string, bool) { + return get(ctx, []string{ + TargetVariable, + + // Primary variable name for the bundle target until v0.203.2. + // See https://github.com/databricks/cli/pull/670. 
+ "DATABRICKS_BUNDLE_ENV", + }) +} diff --git a/bundle/env/target_test.go b/bundle/env/target_test.go new file mode 100644 index 000000000..0c15bf917 --- /dev/null +++ b/bundle/env/target_test.go @@ -0,0 +1,43 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestTarget(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("first", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TARGET", "foo") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", target) + }) + + t.Run("second", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_ENV", "foo") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", target) + }) + + t.Run("both set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TARGET", "first") + t.Setenv("DATABRICKS_BUNDLE_ENV", "second") + target, ok := Target(ctx) + assert.True(t, ok) + assert.Equal(t, "first", target) + }) + + t.Run("not set", func(t *testing.T) { + target, ok := Target(ctx) + assert.False(t, ok) + assert.Equal(t, "", target) + }) +} diff --git a/bundle/env/temp_dir.go b/bundle/env/temp_dir.go new file mode 100644 index 000000000..b91339079 --- /dev/null +++ b/bundle/env/temp_dir.go @@ -0,0 +1,13 @@ +package env + +import "context" + +// TempDirVariable names the environment variable that holds the temporary directory to use. +const TempDirVariable = "DATABRICKS_BUNDLE_TMP" + +// TempDir returns the temporary directory to use. 
+func TempDir(ctx context.Context) (string, bool) { + return get(ctx, []string{ + TempDirVariable, + }) +} diff --git a/bundle/env/temp_dir_test.go b/bundle/env/temp_dir_test.go new file mode 100644 index 000000000..7659bac6d --- /dev/null +++ b/bundle/env/temp_dir_test.go @@ -0,0 +1,28 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestTempDir(t *testing.T) { + ctx := context.Background() + + testutil.CleanupEnvironment(t) + + t.Run("set", func(t *testing.T) { + t.Setenv("DATABRICKS_BUNDLE_TMP", "foo") + tempDir, ok := TempDir(ctx) + assert.True(t, ok) + assert.Equal(t, "foo", tempDir) + }) + + t.Run("not set", func(t *testing.T) { + tempDir, ok := TempDir(ctx) + assert.False(t, ok) + assert.Equal(t, "", tempDir) + }) +} diff --git a/bundle/root.go b/bundle/root.go index 46f63e134..7518bf5fc 100644 --- a/bundle/root.go +++ b/bundle/root.go @@ -1,21 +1,21 @@ package bundle import ( + "context" "fmt" "os" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/env" "github.com/databricks/cli/folders" ) -const envBundleRoot = "BUNDLE_ROOT" - -// getRootEnv returns the value of the `BUNDLE_ROOT` environment variable +// getRootEnv returns the value of the bundle root environment variable // if it set and is a directory. If the environment variable is set but // is not a directory, it returns an error. If the environment variable is // not set, it returns an empty string. 
-func getRootEnv() (string, error) { - path, ok := os.LookupEnv(envBundleRoot) +func getRootEnv(ctx context.Context) (string, error) { + path, ok := env.Root(ctx) if !ok { return "", nil } @@ -24,7 +24,7 @@ func getRootEnv() (string, error) { err = fmt.Errorf("not a directory") } if err != nil { - return "", fmt.Errorf(`invalid bundle root %s="%s": %w`, envBundleRoot, path, err) + return "", fmt.Errorf(`invalid bundle root %s="%s": %w`, env.RootVariable, path, err) } return path, nil } @@ -48,8 +48,8 @@ func getRootWithTraversal() (string, error) { } // mustGetRoot returns a bundle root or an error if one cannot be found. -func mustGetRoot() (string, error) { - path, err := getRootEnv() +func mustGetRoot(ctx context.Context) (string, error) { + path, err := getRootEnv(ctx) if path != "" || err != nil { return path, err } @@ -57,9 +57,9 @@ func mustGetRoot() (string, error) { } // tryGetRoot returns a bundle root or an empty string if one cannot be found. -func tryGetRoot() (string, error) { +func tryGetRoot(ctx context.Context) (string, error) { // Note: an invalid value in the environment variable is still an error. - path, err := getRootEnv() + path, err := getRootEnv(ctx) if path != "" || err != nil { return path, err } diff --git a/bundle/root_test.go b/bundle/root_test.go index 0c4c46aaf..88113546c 100644 --- a/bundle/root_test.go +++ b/bundle/root_test.go @@ -7,6 +7,7 @@ import ( "testing" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/env" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -32,49 +33,55 @@ func chdir(t *testing.T, dir string) string { } func TestRootFromEnv(t *testing.T) { + ctx := context.Background() dir := t.TempDir() - t.Setenv(envBundleRoot, dir) + t.Setenv(env.RootVariable, dir) // It should pull the root from the environment variable. 
- root, err := mustGetRoot() + root, err := mustGetRoot(ctx) require.NoError(t, err) require.Equal(t, root, dir) } func TestRootFromEnvDoesntExist(t *testing.T) { + ctx := context.Background() dir := t.TempDir() - t.Setenv(envBundleRoot, filepath.Join(dir, "doesntexist")) + t.Setenv(env.RootVariable, filepath.Join(dir, "doesntexist")) // It should pull the root from the environment variable. - _, err := mustGetRoot() + _, err := mustGetRoot(ctx) require.Errorf(t, err, "invalid bundle root") } func TestRootFromEnvIsFile(t *testing.T) { + ctx := context.Background() dir := t.TempDir() f, err := os.Create(filepath.Join(dir, "invalid")) require.NoError(t, err) f.Close() - t.Setenv(envBundleRoot, f.Name()) + t.Setenv(env.RootVariable, f.Name()) // It should pull the root from the environment variable. - _, err = mustGetRoot() + _, err = mustGetRoot(ctx) require.Errorf(t, err, "invalid bundle root") } func TestRootIfEnvIsEmpty(t *testing.T) { + ctx := context.Background() dir := "" - t.Setenv(envBundleRoot, dir) + t.Setenv(env.RootVariable, dir) // It should pull the root from the environment variable. - _, err := mustGetRoot() + _, err := mustGetRoot(ctx) require.Errorf(t, err, "invalid bundle root") } func TestRootLookup(t *testing.T) { + ctx := context.Background() + // Have to set then unset to allow the testing package to revert it to its original value. - t.Setenv(envBundleRoot, "") - os.Unsetenv(envBundleRoot) + t.Setenv(env.RootVariable, "") + os.Unsetenv(env.RootVariable) chdir(t, t.TempDir()) @@ -89,27 +96,30 @@ func TestRootLookup(t *testing.T) { // It should find the project root from $PWD. wd := chdir(t, "./a/b/c") - root, err := mustGetRoot() + root, err := mustGetRoot(ctx) require.NoError(t, err) require.Equal(t, wd, root) } func TestRootLookupError(t *testing.T) { + ctx := context.Background() + // Have to set then unset to allow the testing package to revert it to its original value. 
- t.Setenv(envBundleRoot, "") - os.Unsetenv(envBundleRoot) + t.Setenv(env.RootVariable, "") + os.Unsetenv(env.RootVariable) // It can't find a project root from a temporary directory. _ = chdir(t, t.TempDir()) - _, err := mustGetRoot() + _, err := mustGetRoot(ctx) require.ErrorContains(t, err, "unable to locate bundle root") } func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { + ctx := context.Background() chdir(t, filepath.Join(".", "tests", "basic")) - t.Setenv(ExtraIncludePathsKey, "test") + t.Setenv(env.IncludesVariable, "test") - bundle, err := MustLoad(context.Background()) + bundle, err := MustLoad(ctx) assert.NoError(t, err) assert.Equal(t, "basic", bundle.Config.Bundle.Name) @@ -119,30 +129,33 @@ func TestLoadYamlWhenIncludesEnvPresent(t *testing.T) { } func TestLoadDefautlBundleWhenNoYamlAndRootAndIncludesEnvPresent(t *testing.T) { + ctx := context.Background() dir := t.TempDir() chdir(t, dir) - t.Setenv(envBundleRoot, dir) - t.Setenv(ExtraIncludePathsKey, "test") + t.Setenv(env.RootVariable, dir) + t.Setenv(env.IncludesVariable, "test") - bundle, err := MustLoad(context.Background()) + bundle, err := MustLoad(ctx) assert.NoError(t, err) assert.Equal(t, dir, bundle.Config.Path) } func TestErrorIfNoYamlNoRootEnvAndIncludesEnvPresent(t *testing.T) { + ctx := context.Background() dir := t.TempDir() chdir(t, dir) - t.Setenv(ExtraIncludePathsKey, "test") + t.Setenv(env.IncludesVariable, "test") - _, err := MustLoad(context.Background()) + _, err := MustLoad(ctx) assert.Error(t, err) } func TestErrorIfNoYamlNoIncludesEnvAndRootEnvPresent(t *testing.T) { + ctx := context.Background() dir := t.TempDir() chdir(t, dir) - t.Setenv(envBundleRoot, dir) + t.Setenv(env.RootVariable, dir) - _, err := MustLoad(context.Background()) + _, err := MustLoad(ctx) assert.Error(t, err) } diff --git a/cmd/bundle/sync.go b/cmd/bundle/sync.go index be45626a3..6d6a6f5a3 100644 --- a/cmd/bundle/sync.go +++ b/cmd/bundle/sync.go @@ -18,12 +18,12 @@ type syncFlags struct { } func (f 
*syncFlags) syncOptionsFromBundle(cmd *cobra.Command, b *bundle.Bundle) (*sync.SyncOptions, error) { - cacheDir, err := b.CacheDir() + cacheDir, err := b.CacheDir(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } - includes, err := b.GetSyncIncludePatterns() + includes, err := b.GetSyncIncludePatterns(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get list of sync includes: %w", err) } diff --git a/cmd/cmd.go b/cmd/cmd.go index 032fde5cd..6dd0f6e21 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -1,6 +1,7 @@ package cmd import ( + "context" "strings" "github.com/databricks/cli/cmd/account" @@ -21,8 +22,8 @@ const ( permissionsGroup = "permissions" ) -func New() *cobra.Command { - cli := root.New() +func New(ctx context.Context) *cobra.Command { + cli := root.New(ctx) // Add account subcommand. cli.AddCommand(account.New()) diff --git a/cmd/configure/configure_test.go b/cmd/configure/configure_test.go index e1ebe916b..cf0505edd 100644 --- a/cmd/configure/configure_test.go +++ b/cmd/configure/configure_test.go @@ -54,7 +54,7 @@ func TestDefaultConfigureNoInteractive(t *testing.T) { }) os.Stdin = inp - cmd := cmd.New() + cmd := cmd.New(ctx) cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) err := cmd.ExecuteContext(ctx) @@ -87,7 +87,7 @@ func TestConfigFileFromEnvNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - cmd := cmd.New() + cmd := cmd.New(ctx) cmd.SetArgs([]string{"configure", "--token", "--host", "https://host"}) err := cmd.ExecuteContext(ctx) @@ -116,7 +116,7 @@ func TestCustomProfileConfigureNoInteractive(t *testing.T) { t.Cleanup(func() { os.Stdin = oldStdin }) os.Stdin = inp - cmd := cmd.New() + cmd := cmd.New(ctx) cmd.SetArgs([]string{"configure", "--token", "--host", "https://host", "--profile", "CUSTOM"}) err := cmd.ExecuteContext(ctx) diff --git a/cmd/root/bundle.go b/cmd/root/bundle.go index 10cce67a4..3f9d90db6 100644 --- 
a/cmd/root/bundle.go +++ b/cmd/root/bundle.go @@ -2,17 +2,15 @@ package root import ( "context" - "os" "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config/mutator" + "github.com/databricks/cli/bundle/env" + envlib "github.com/databricks/cli/libs/env" "github.com/spf13/cobra" "golang.org/x/exp/maps" ) -const envName = "DATABRICKS_BUNDLE_ENV" -const targetName = "DATABRICKS_BUNDLE_TARGET" - // getTarget returns the name of the target to operate in. func getTarget(cmd *cobra.Command) (value string) { // The command line flag takes precedence. @@ -33,13 +31,7 @@ func getTarget(cmd *cobra.Command) (value string) { } // If it's not set, use the environment variable. - target := os.Getenv(targetName) - // If target env is not set with a new variable, try to check for old variable name - // TODO: remove when environments section is not supported anymore - if target == "" { - target = os.Getenv(envName) - } - + target, _ := env.Target(cmd.Context()) return target } @@ -54,7 +46,7 @@ func getProfile(cmd *cobra.Command) (value string) { } // If it's not set, use the environment variable. - return os.Getenv("DATABRICKS_CONFIG_PROFILE") + return envlib.Get(cmd.Context(), "DATABRICKS_CONFIG_PROFILE") } // loadBundle loads the bundle configuration and applies default mutators. diff --git a/cmd/root/io.go b/cmd/root/io.go index 380c01b18..23c7d6c64 100644 --- a/cmd/root/io.go +++ b/cmd/root/io.go @@ -1,9 +1,8 @@ package root import ( - "os" - "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" ) @@ -21,7 +20,7 @@ func initOutputFlag(cmd *cobra.Command) *outputFlag { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
- if v, ok := os.LookupEnv(envOutputFormat); ok { + if v, ok := env.Lookup(cmd.Context(), envOutputFormat); ok { f.output.Set(v) } diff --git a/cmd/root/logger.go b/cmd/root/logger.go index ddfae445a..dca07ca4b 100644 --- a/cmd/root/logger.go +++ b/cmd/root/logger.go @@ -5,9 +5,9 @@ import ( "fmt" "io" "log/slog" - "os" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" "github.com/databricks/cli/libs/log" "github.com/fatih/color" @@ -126,13 +126,13 @@ func initLogFlags(cmd *cobra.Command) *logFlags { // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. - if v, ok := os.LookupEnv(envLogFile); ok { + if v, ok := env.Lookup(cmd.Context(), envLogFile); ok { f.file.Set(v) } - if v, ok := os.LookupEnv(envLogLevel); ok { + if v, ok := env.Lookup(cmd.Context(), envLogLevel); ok { f.level.Set(v) } - if v, ok := os.LookupEnv(envLogFormat); ok { + if v, ok := env.Lookup(cmd.Context(), envLogFormat); ok { f.output.Set(v) } diff --git a/cmd/root/progress_logger.go b/cmd/root/progress_logger.go index bdf52558b..328b99476 100644 --- a/cmd/root/progress_logger.go +++ b/cmd/root/progress_logger.go @@ -6,6 +6,7 @@ import ( "os" "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/env" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" "golang.org/x/term" @@ -51,7 +52,7 @@ func initProgressLoggerFlag(cmd *cobra.Command, logFlags *logFlags) *progressLog // Configure defaults from environment, if applicable. // If the provided value is invalid it is ignored. 
- if v, ok := os.LookupEnv(envProgressFormat); ok { + if v, ok := env.Lookup(cmd.Context(), envProgressFormat); ok { f.Set(v) } diff --git a/cmd/root/root.go b/cmd/root/root.go index c71cf9eac..38eb42ccb 100644 --- a/cmd/root/root.go +++ b/cmd/root/root.go @@ -14,7 +14,7 @@ import ( "github.com/spf13/cobra" ) -func New() *cobra.Command { +func New(ctx context.Context) *cobra.Command { cmd := &cobra.Command{ Use: "databricks", Short: "Databricks CLI", @@ -30,6 +30,10 @@ func New() *cobra.Command { SilenceErrors: true, } + // Pass the context along through the command during initialization. + // It will be overwritten when the command is executed. + cmd.SetContext(ctx) + // Initialize flags logFlags := initLogFlags(cmd) progressLoggerFlag := initProgressLoggerFlag(cmd, logFlags) diff --git a/cmd/root/user_agent_upstream.go b/cmd/root/user_agent_upstream.go index 3e173bda8..f580b4263 100644 --- a/cmd/root/user_agent_upstream.go +++ b/cmd/root/user_agent_upstream.go @@ -2,8 +2,8 @@ package root import ( "context" - "os" + "github.com/databricks/cli/libs/env" "github.com/databricks/databricks-sdk-go/useragent" ) @@ -16,7 +16,7 @@ const upstreamKey = "upstream" const upstreamVersionKey = "upstream-version" func withUpstreamInUserAgent(ctx context.Context) context.Context { - value := os.Getenv(upstreamEnvVar) + value := env.Get(ctx, upstreamEnvVar) if value == "" { return ctx } @@ -24,7 +24,7 @@ func withUpstreamInUserAgent(ctx context.Context) context.Context { ctx = useragent.InContext(ctx, upstreamKey, value) // Include upstream version as well, if set. 
- value = os.Getenv(upstreamVersionEnvVar) + value = env.Get(ctx, upstreamVersionEnvVar) if value == "" { return ctx } diff --git a/cmd/sync/sync.go b/cmd/sync/sync.go index 4a62123ba..5fdfb169d 100644 --- a/cmd/sync/sync.go +++ b/cmd/sync/sync.go @@ -30,12 +30,12 @@ func (f *syncFlags) syncOptionsFromBundle(cmd *cobra.Command, args []string, b * return nil, fmt.Errorf("SRC and DST are not configurable in the context of a bundle") } - cacheDir, err := b.CacheDir() + cacheDir, err := b.CacheDir(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get bundle cache directory: %w", err) } - includes, err := b.GetSyncIncludePatterns() + includes, err := b.GetSyncIncludePatterns(cmd.Context()) if err != nil { return nil, fmt.Errorf("cannot get list of sync includes: %w", err) } diff --git a/internal/helpers.go b/internal/helpers.go index bf27fbb55..68c00019a 100644 --- a/internal/helpers.go +++ b/internal/helpers.go @@ -118,7 +118,7 @@ func (t *cobraTestRunner) RunBackground() { var stdoutW, stderrW io.WriteCloser stdoutR, stdoutW = io.Pipe() stderrR, stderrW = io.Pipe() - root := cmd.New() + root := cmd.New(context.Background()) root.SetOut(stdoutW) root.SetErr(stderrW) root.SetArgs(t.args) diff --git a/internal/testutil/env.go b/internal/testutil/env.go new file mode 100644 index 000000000..05ffaf002 --- /dev/null +++ b/internal/testutil/env.go @@ -0,0 +1,33 @@ +package testutil + +import ( + "os" + "strings" + "testing" +) + +// CleanupEnvironment sets up a pristine environment containing only $PATH and $HOME. +// The original environment is restored upon test completion. +// Note: use of this function is incompatible with parallel execution. +func CleanupEnvironment(t *testing.T) { + // Restore environment when test finishes. + environ := os.Environ() + t.Cleanup(func() { + // Restore original environment. 
+ for _, kv := range environ { + kvs := strings.SplitN(kv, "=", 2) + os.Setenv(kvs[0], kvs[1]) + } + }) + + path := os.Getenv("PATH") + pwd := os.Getenv("PWD") + os.Clearenv() + + // We use t.Setenv instead of os.Setenv because the former actively + // prevents a test being run with t.Parallel. Modifying the environment + // within a test is not compatible with running tests in parallel + // because of isolation; the environment is scoped to the process. + t.Setenv("PATH", path) + t.Setenv("HOME", pwd) +} diff --git a/libs/env/context.go b/libs/env/context.go new file mode 100644 index 000000000..cf04c1ece --- /dev/null +++ b/libs/env/context.go @@ -0,0 +1,63 @@ +package env + +import ( + "context" + "os" +) + +var envContextKey int + +func copyMap(m map[string]string) map[string]string { + out := make(map[string]string, len(m)) + for k, v := range m { + out[k] = v + } + return out +} + +func getMap(ctx context.Context) map[string]string { + if ctx == nil { + return nil + } + m, ok := ctx.Value(&envContextKey).(map[string]string) + if !ok { + return nil + } + return m +} + +func setMap(ctx context.Context, m map[string]string) context.Context { + return context.WithValue(ctx, &envContextKey, m) +} + +// Lookup key in the context or the the environment. +// Context has precedence. +func Lookup(ctx context.Context, key string) (string, bool) { + m := getMap(ctx) + + // Return if the key is set in the context. + v, ok := m[key] + if ok { + return v, true + } + + // Fall back to the environment. + return os.LookupEnv(key) +} + +// Get key from the context or the environment. +// Context has precedence. +func Get(ctx context.Context, key string) string { + v, _ := Lookup(ctx, key) + return v +} + +// Set key on the context. +// +// Note: this does NOT mutate the processes' actual environment variables. +// It is only visible to other code that uses this package. 
+func Set(ctx context.Context, key, value string) context.Context { + m := copyMap(getMap(ctx)) + m[key] = value + return setMap(ctx, m) +} diff --git a/libs/env/context_test.go b/libs/env/context_test.go new file mode 100644 index 000000000..9ff194597 --- /dev/null +++ b/libs/env/context_test.go @@ -0,0 +1,41 @@ +package env + +import ( + "context" + "testing" + + "github.com/databricks/cli/internal/testutil" + "github.com/stretchr/testify/assert" +) + +func TestContext(t *testing.T) { + testutil.CleanupEnvironment(t) + t.Setenv("FOO", "bar") + + ctx0 := context.Background() + + // Get + assert.Equal(t, "bar", Get(ctx0, "FOO")) + assert.Equal(t, "", Get(ctx0, "dontexist")) + + // Lookup + v, ok := Lookup(ctx0, "FOO") + assert.True(t, ok) + assert.Equal(t, "bar", v) + v, ok = Lookup(ctx0, "dontexist") + assert.False(t, ok) + assert.Equal(t, "", v) + + // Set and get new context. + // Verify that the previous context remains unchanged. + ctx1 := Set(ctx0, "FOO", "baz") + assert.Equal(t, "baz", Get(ctx1, "FOO")) + assert.Equal(t, "bar", Get(ctx0, "FOO")) + + // Set and get new context. + // Verify that the previous contexts remains unchanged. + ctx2 := Set(ctx1, "FOO", "qux") + assert.Equal(t, "qux", Get(ctx2, "FOO")) + assert.Equal(t, "baz", Get(ctx1, "FOO")) + assert.Equal(t, "bar", Get(ctx0, "FOO")) +} diff --git a/libs/env/pkg.go b/libs/env/pkg.go new file mode 100644 index 000000000..e0be7e225 --- /dev/null +++ b/libs/env/pkg.go @@ -0,0 +1,7 @@ +package env + +// The env package provides functions for working with environment variables +// and allowing for overrides via the context.Context. This is useful for +// testing where tainting a processes' environment is at odds with parallelism. +// Use of a context.Context to store variable overrides means tests can be +// parallelized without worrying about environment variable interference. 
diff --git a/main.go b/main.go index a4b8aabd6..8c8516d9d 100644 --- a/main.go +++ b/main.go @@ -1,10 +1,12 @@ package main import ( + "context" + "github.com/databricks/cli/cmd" "github.com/databricks/cli/cmd/root" ) func main() { - root.Execute(cmd.New()) + root.Execute(cmd.New(context.Background())) } diff --git a/main_test.go b/main_test.go index 6a5d19448..34ecdca0f 100644 --- a/main_test.go +++ b/main_test.go @@ -1,6 +1,7 @@ package main import ( + "context" "testing" "github.com/databricks/cli/cmd" @@ -15,7 +16,7 @@ func TestCommandsDontUseUnderscoreInName(t *testing.T) { // This test lives in the main package because this is where // all commands are imported. // - queue := []*cobra.Command{cmd.New()} + queue := []*cobra.Command{cmd.New(context.Background())} for len(queue) > 0 { cmd := queue[0] assert.NotContains(t, cmd.Name(), "_") From c836194d89b376ba335501dbbb5995518b3dbd21 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 11 Sep 2023 10:18:52 +0200 Subject: [PATCH 127/139] Update Go SDK to v0.19.1 (#759) ## Changes This includes token reuse for Azure CLI based auth. See: https://github.com/databricks/databricks-sdk-go/releases/tag/v0.19.1 ## Tests Confirmed manually that Azure CLI tokens are acquired only once. 
--- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 7e24b0db2..14c85e675 100644 --- a/go.mod +++ b/go.mod @@ -4,7 +4,7 @@ go 1.21 require ( github.com/briandowns/spinner v1.23.0 // Apache 2.0 - github.com/databricks/databricks-sdk-go v0.19.0 // Apache 2.0 + github.com/databricks/databricks-sdk-go v0.19.1 // Apache 2.0 github.com/fatih/color v1.15.0 // MIT github.com/ghodss/yaml v1.0.0 // MIT + NOTICE github.com/google/uuid v1.3.0 // BSD-3-Clause diff --git a/go.sum b/go.sum index 83bb01b62..20c985b07 100644 --- a/go.sum +++ b/go.sum @@ -36,8 +36,8 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/databricks/databricks-sdk-go v0.19.0 h1:Xh5A90/+8ehW7fTqoQbQK5xZu7a/akv3Xwv8UdWB4GU= -github.com/databricks/databricks-sdk-go v0.19.0/go.mod h1:Bt/3i3ry/rQdE6Y+psvkAENlp+LzJHaQK5PsLIstQb4= +github.com/databricks/databricks-sdk-go v0.19.1 h1:hP7xZb+Hd8n0grnEcf2FOMn6lWox7vp5KAan3D2hnzM= +github.com/databricks/databricks-sdk-go v0.19.1/go.mod h1:Bt/3i3ry/rQdE6Y+psvkAENlp+LzJHaQK5PsLIstQb4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= From a4e94e1b3662249599f43009be4b2683e31d943b Mon Sep 17 00:00:00 2001 From: "Lennart Kats (databricks)" Date: Mon, 11 Sep 2023 10:59:48 +0200 Subject: [PATCH 128/139] Fix author in setup.py (#761) Fix author in setup.py showing --- .../default-python/template/{{.project_name}}/setup.py.tmpl | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl index 93f4e9ff9..efd598820 100644 --- a/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl +++ b/libs/template/templates/default-python/template/{{.project_name}}/setup.py.tmpl @@ -15,7 +15,7 @@ setup( name="{{.project_name}}", version={{.project_name}}.__version__, url="https://databricks.com", - author="{{.user_name}}", + author="{{user_name}}", description="my test wheel", packages=find_packages(where='./src'), package_dir={'': 'src'}, From ad84abf41588eeab78484caff876546ecf2d4199 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 11 Sep 2023 12:22:05 +0200 Subject: [PATCH 129/139] Fix temporary directory cleanup for init repository downloading (#760) ## Changes This PR fixes a bug where the temp directory created to download the template would not be cleaned up. ## Tests Tested manually. The exact process is described in a comment below. --- cmd/bundle/init.go | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/cmd/bundle/init.go b/cmd/bundle/init.go index 9a11eb257..3038cb7a2 100644 --- a/cmd/bundle/init.go +++ b/cmd/bundle/init.go @@ -74,22 +74,21 @@ func newInitCommand() *cobra.Command { return template.Materialize(ctx, configFile, templatePath, outputDir) } - // Download the template in a temporary directory - tmpDir := os.TempDir() - templateURL := templatePath - repoDir := filepath.Join(tmpDir, repoName(templateURL)) - err := os.MkdirAll(repoDir, 0755) + // Create a temporary directory with the name of the repository. The '*' + // character is replaced by a random string in the generated temporary directory. 
+ repoDir, err := os.MkdirTemp("", repoName(templatePath)+"-*") if err != nil { return err } // TODO: Add automated test that the downloaded git repo is cleaned up. - err = git.Clone(ctx, templateURL, "", repoDir) + // Clone the repository in the temporary directory + err = git.Clone(ctx, templatePath, "", repoDir) if err != nil { return err } - defer os.RemoveAll(templateDir) + // Clean up downloaded repository once the template is materialized. + defer os.RemoveAll(repoDir) return template.Materialize(ctx, configFile, filepath.Join(repoDir, templateDir), outputDir) } - return cmd } From 44726d6444dc0a29942445fb99f3aba3a9378693 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 11 Sep 2023 13:57:21 +0200 Subject: [PATCH 130/139] Release v0.204.1 (#763) Bundles: * Fix conversion of job parameters ([#744](https://github.com/databricks/cli/pull/744)). * Add schema and config validation to jsonschema package ([#740](https://github.com/databricks/cli/pull/740)). * Support Model Serving Endpoints in bundles ([#682](https://github.com/databricks/cli/pull/682)). * Do not include empty output in job run output ([#749](https://github.com/databricks/cli/pull/749)). * Fixed marking libraries from DBFS as remote ([#750](https://github.com/databricks/cli/pull/750)). * Process only Python wheel tasks which have local libraries used ([#751](https://github.com/databricks/cli/pull/751)). * Add enum support for bundle templates ([#668](https://github.com/databricks/cli/pull/668)). * Apply Python wheel trampoline if workspace library is used ([#755](https://github.com/databricks/cli/pull/755)). * List available targets when incorrect target passed ([#756](https://github.com/databricks/cli/pull/756)). * Make bundle and sync fields optional ([#757](https://github.com/databricks/cli/pull/757)). * Consolidate environment variable interaction ([#747](https://github.com/databricks/cli/pull/747)). 
Internal: * Update Go SDK to v0.19.1 ([#759](https://github.com/databricks/cli/pull/759)). --- CHANGELOG.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9835b0bce..ba0dbcdcf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Version changelog +## 0.204.1 + +Bundles: + * Fix conversion of job parameters ([#744](https://github.com/databricks/cli/pull/744)). + * Add schema and config validation to jsonschema package ([#740](https://github.com/databricks/cli/pull/740)). + * Support Model Serving Endpoints in bundles ([#682](https://github.com/databricks/cli/pull/682)). + * Do not include empty output in job run output ([#749](https://github.com/databricks/cli/pull/749)). + * Fixed marking libraries from DBFS as remote ([#750](https://github.com/databricks/cli/pull/750)). + * Process only Python wheel tasks which have local libraries used ([#751](https://github.com/databricks/cli/pull/751)). + * Add enum support for bundle templates ([#668](https://github.com/databricks/cli/pull/668)). + * Apply Python wheel trampoline if workspace library is used ([#755](https://github.com/databricks/cli/pull/755)). + * List available targets when incorrect target passed ([#756](https://github.com/databricks/cli/pull/756)). + * Make bundle and sync fields optional ([#757](https://github.com/databricks/cli/pull/757)). + * Consolidate environment variable interaction ([#747](https://github.com/databricks/cli/pull/747)). + +Internal: + * Update Go SDK to v0.19.1 ([#759](https://github.com/databricks/cli/pull/759)). + + + ## 0.204.0 This release includes permission related commands for a subset of workspace From 373f441eb2e5a8a07905882caa465c039ba05511 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Mon, 11 Sep 2023 17:23:25 +0200 Subject: [PATCH 131/139] Use clearer error message when no interpolation value is found. 
(#764) ## Changes This PR makes the error message clearer for when interpolation fails. ## Tests Existing unit test and manually --- bundle/config/interpolation/interpolation.go | 2 +- bundle/config/interpolation/interpolation_test.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/bundle/config/interpolation/interpolation.go b/bundle/config/interpolation/interpolation.go index bf5bd169e..8ba0b8b1f 100644 --- a/bundle/config/interpolation/interpolation.go +++ b/bundle/config/interpolation/interpolation.go @@ -184,7 +184,7 @@ func (a *accumulator) Resolve(path string, seenPaths []string, fns ...LookupFunc // fetch the string node to resolve field, ok := a.strings[path] if !ok { - return fmt.Errorf("could not resolve reference %s", path) + return fmt.Errorf("no value found for interpolation reference: ${%s}", path) } // return early if the string field has no variables to interpolate diff --git a/bundle/config/interpolation/interpolation_test.go b/bundle/config/interpolation/interpolation_test.go index 83254c9b0..cccb6dc71 100644 --- a/bundle/config/interpolation/interpolation_test.go +++ b/bundle/config/interpolation/interpolation_test.go @@ -247,5 +247,5 @@ func TestInterpolationInvalidVariableReference(t *testing.T) { } err := expand(&config) - assert.ErrorContains(t, err, "could not resolve reference vars.foo") + assert.ErrorContains(t, err, "no value found for interpolation reference: ${vars.foo}") } From 0cb05d1dedc4cdb591b884248b8871f73950c9bd Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 11 Sep 2023 17:32:24 +0200 Subject: [PATCH 132/139] Prompt once for a client profile (#727) ## Changes The previous implementation ran the risk of infinite looping for the account client due to a mismatch in determining what constitutes an account client between the CLI and SDK (see [here](https://github.com/databricks/cli/blob/83443bae8d8ad4df3758f4192c6bbe613faae9c4/libs/databrickscfg/profiles.go#L61) and 
[here](https://github.com/databricks/databricks-sdk-go/blob/0fdc5165e57a4e7af6ec97b47595c6dddf37b10b/config/config.go#L160)). Ultimately, this code must never infinite loop. If a user is prompted and selects a profile that cannot be used, they should receive that feedback immediately and try again, instead of being prompted again. Related to #726. ## Tests --- cmd/root/auth.go | 146 +++++++++++++++++++++++++------------ cmd/root/auth_test.go | 164 ++++++++++++++++++++++++++++++++++++++++++ libs/cmdio/io.go | 7 ++ libs/cmdio/testing.go | 46 ++++++++++++ 4 files changed, 318 insertions(+), 45 deletions(-) create mode 100644 libs/cmdio/testing.go diff --git a/cmd/root/auth.go b/cmd/root/auth.go index d4c9a31b9..de5648c65 100644 --- a/cmd/root/auth.go +++ b/cmd/root/auth.go @@ -25,13 +25,57 @@ func initProfileFlag(cmd *cobra.Command) { cmd.RegisterFlagCompletionFunc("profile", databrickscfg.ProfileCompletion) } +func profileFlagValue(cmd *cobra.Command) (string, bool) { + profileFlag := cmd.Flag("profile") + if profileFlag == nil { + return "", false + } + value := profileFlag.Value.String() + return value, value != "" +} + +// Helper function to create an account client or prompt once if the given configuration is not valid. +func accountClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt bool) (*databricks.AccountClient, error) { + a, err := databricks.NewAccountClient((*databricks.Config)(cfg)) + if err == nil { + err = a.Config.Authenticate(emptyHttpRequest(ctx)) + } + + prompt := false + if allowPrompt && err != nil && cmdio.IsInteractive(ctx) { + // Prompt to select a profile if the current configuration is not an account client. + prompt = prompt || errors.Is(err, databricks.ErrNotAccountClient) + // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. + prompt = prompt || errors.Is(err, config.ErrCannotConfigureAuth) + } + + if !prompt { + // If we are not prompting, we can return early. 
+ return a, err + } + + // Try picking a profile dynamically if the current configuration is not valid. + profile, err := askForAccountProfile(ctx) + if err != nil { + return nil, err + } + a, err = databricks.NewAccountClient(&databricks.Config{Profile: profile}) + if err == nil { + err = a.Config.Authenticate(emptyHttpRequest(ctx)) + if err != nil { + return nil, err + } + } + return a, nil +} + func MustAccountClient(cmd *cobra.Command, args []string) error { cfg := &config.Config{} - // command-line flag can specify the profile in use - profileFlag := cmd.Flag("profile") - if profileFlag != nil { - cfg.Profile = profileFlag.Value.String() + // The command-line profile flag takes precedence over DATABRICKS_CONFIG_PROFILE. + profile, hasProfileFlag := profileFlagValue(cmd) + if hasProfileFlag { + cfg.Profile = profile } if cfg.Profile == "" { @@ -48,16 +92,8 @@ func MustAccountClient(cmd *cobra.Command, args []string) error { } } -TRY_AUTH: // or try picking a config profile dynamically - a, err := databricks.NewAccountClient((*databricks.Config)(cfg)) - if cmdio.IsInteractive(cmd.Context()) && errors.Is(err, databricks.ErrNotAccountClient) { - profile, err := askForAccountProfile() - if err != nil { - return err - } - cfg = &config.Config{Profile: profile} - goto TRY_AUTH - } + allowPrompt := !hasProfileFlag + a, err := accountClientOrPrompt(cmd.Context(), cfg, allowPrompt) if err != nil { return err } @@ -66,13 +102,48 @@ TRY_AUTH: // or try picking a config profile dynamically return nil } +// Helper function to create a workspace client or prompt once if the given configuration is not valid. 
+func workspaceClientOrPrompt(ctx context.Context, cfg *config.Config, allowPrompt bool) (*databricks.WorkspaceClient, error) { + w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg)) + if err == nil { + err = w.Config.Authenticate(emptyHttpRequest(ctx)) + } + + prompt := false + if allowPrompt && err != nil && cmdio.IsInteractive(ctx) { + // Prompt to select a profile if the current configuration is not a workspace client. + prompt = prompt || errors.Is(err, databricks.ErrNotWorkspaceClient) + // Prompt to select a profile if the current configuration doesn't resolve to a credential provider. + prompt = prompt || errors.Is(err, config.ErrCannotConfigureAuth) + } + + if !prompt { + // If we are not prompting, we can return early. + return w, err + } + + // Try picking a profile dynamically if the current configuration is not valid. + profile, err := askForWorkspaceProfile(ctx) + if err != nil { + return nil, err + } + w, err = databricks.NewWorkspaceClient(&databricks.Config{Profile: profile}) + if err == nil { + err = w.Config.Authenticate(emptyHttpRequest(ctx)) + if err != nil { + return nil, err + } + } + return w, nil +} + func MustWorkspaceClient(cmd *cobra.Command, args []string) error { cfg := &config.Config{} - // command-line flag takes precedence over environment variable - profileFlag := cmd.Flag("profile") - if profileFlag != nil { - cfg.Profile = profileFlag.Value.String() + // The command-line profile flag takes precedence over DATABRICKS_CONFIG_PROFILE. 
+ profile, hasProfileFlag := profileFlagValue(cmd) + if hasProfileFlag { + cfg.Profile = profile } // try configuring a bundle @@ -87,24 +158,13 @@ func MustWorkspaceClient(cmd *cobra.Command, args []string) error { cfg = currentBundle.WorkspaceClient().Config } -TRY_AUTH: // or try picking a config profile dynamically + allowPrompt := !hasProfileFlag + w, err := workspaceClientOrPrompt(cmd.Context(), cfg, allowPrompt) + if err != nil { + return err + } + ctx := cmd.Context() - w, err := databricks.NewWorkspaceClient((*databricks.Config)(cfg)) - if err != nil { - return err - } - err = w.Config.Authenticate(emptyHttpRequest(ctx)) - if cmdio.IsInteractive(ctx) && errors.Is(err, config.ErrCannotConfigureAuth) { - profile, err := askForWorkspaceProfile() - if err != nil { - return err - } - cfg = &config.Config{Profile: profile} - goto TRY_AUTH - } - if err != nil { - return err - } ctx = context.WithValue(ctx, &workspaceClient, w) cmd.SetContext(ctx) return nil @@ -121,7 +181,7 @@ func transformLoadError(path string, err error) error { return err } -func askForWorkspaceProfile() (string, error) { +func askForWorkspaceProfile(ctx context.Context) (string, error) { path, err := databrickscfg.GetPath() if err != nil { return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) @@ -136,7 +196,7 @@ func askForWorkspaceProfile() (string, error) { case 1: return profiles[0].Name, nil } - i, _, err := (&promptui.Select{ + i, _, err := cmdio.RunSelect(ctx, &promptui.Select{ Label: fmt.Sprintf("Workspace profiles defined in %s", file), Items: profiles, Searcher: profiles.SearchCaseInsensitive, @@ -147,16 +207,14 @@ func askForWorkspaceProfile() (string, error) { Inactive: `{{.Name}}`, Selected: `{{ "Using workspace profile" | faint }}: {{ .Name | bold }}`, }, - Stdin: os.Stdin, - Stdout: os.Stderr, - }).Run() + }) if err != nil { return "", err } return profiles[i].Name, nil } -func askForAccountProfile() (string, error) { +func askForAccountProfile(ctx 
context.Context) (string, error) { path, err := databrickscfg.GetPath() if err != nil { return "", fmt.Errorf("cannot determine Databricks config file path: %w", err) @@ -171,7 +229,7 @@ func askForAccountProfile() (string, error) { case 1: return profiles[0].Name, nil } - i, _, err := (&promptui.Select{ + i, _, err := cmdio.RunSelect(ctx, &promptui.Select{ Label: fmt.Sprintf("Account profiles defined in %s", file), Items: profiles, Searcher: profiles.SearchCaseInsensitive, @@ -182,9 +240,7 @@ func askForAccountProfile() (string, error) { Inactive: `{{.Name}}`, Selected: `{{ "Using account profile" | faint }}: {{ .Name | bold }}`, }, - Stdin: os.Stdin, - Stdout: os.Stderr, - }).Run() + }) if err != nil { return "", err } diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index 75d255b58..70a52d50d 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -2,9 +2,15 @@ package root import ( "context" + "os" + "path/filepath" "testing" + "time" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestEmptyHttpRequest(t *testing.T) { @@ -12,3 +18,161 @@ func TestEmptyHttpRequest(t *testing.T) { req := emptyHttpRequest(ctx) assert.Equal(t, req.Context(), ctx) } + +type promptFn func(ctx context.Context, cfg *config.Config, retry bool) (any, error) + +var accountPromptFn = func(ctx context.Context, cfg *config.Config, retry bool) (any, error) { + return accountClientOrPrompt(ctx, cfg, retry) +} + +var workspacePromptFn = func(ctx context.Context, cfg *config.Config, retry bool) (any, error) { + return workspaceClientOrPrompt(ctx, cfg, retry) +} + +func expectPrompts(t *testing.T, fn promptFn, config *config.Config) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + // Channel to pass errors from the prompting function back to the test. 
+ errch := make(chan error, 1) + + ctx, io := cmdio.SetupTest(ctx) + go func() { + defer close(errch) + defer cancel() + _, err := fn(ctx, config, true) + errch <- err + }() + + // Expect a prompt + line, _, err := io.Stderr.ReadLine() + if assert.NoError(t, err, "Expected to read a line from stderr") { + assert.Contains(t, string(line), "Search:") + } else { + // If there was an error reading from stderr, the prompting function must have terminated early. + assert.NoError(t, <-errch) + } +} + +func expectReturns(t *testing.T, fn promptFn, config *config.Config) { + ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second) + defer cancel() + + ctx, _ = cmdio.SetupTest(ctx) + client, err := fn(ctx, config, true) + require.NoError(t, err) + require.NotNil(t, client) +} + +func TestAccountClientOrPrompt(t *testing.T) { + dir := t.TempDir() + configFile := filepath.Join(dir, ".databrickscfg") + err := os.WriteFile( + configFile, + []byte(` + [account-1111] + host = https://accounts.azuredatabricks.net/ + account_id = 1111 + token = foobar + + [account-1112] + host = https://accounts.azuredatabricks.net/ + account_id = 1112 + token = foobar + `), + 0755) + require.NoError(t, err) + t.Setenv("DATABRICKS_CONFIG_FILE", configFile) + t.Setenv("PATH", "/nothing") + + t.Run("Prompt if nothing is specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{}) + }) + + t.Run("Prompt if a workspace host is specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: "https://adb-1234567.89.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Prompt if account ID is not specified", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + Token: "foobar", + }) + }) + + t.Run("Prompt if no credential provider can be configured", func(t *testing.T) { + expectPrompts(t, accountPromptFn, &config.Config{ + Host: 
"https://accounts.azuredatabricks.net/", + AccountID: "1234", + }) + }) + + t.Run("Returns if configuration is valid", func(t *testing.T) { + expectReturns(t, accountPromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Returns if a valid profile is specified", func(t *testing.T) { + expectReturns(t, accountPromptFn, &config.Config{ + Profile: "account-1111", + }) + }) +} + +func TestWorkspaceClientOrPrompt(t *testing.T) { + dir := t.TempDir() + configFile := filepath.Join(dir, ".databrickscfg") + err := os.WriteFile( + configFile, + []byte(` + [workspace-1111] + host = https://adb-1111.11.azuredatabricks.net/ + token = foobar + + [workspace-1112] + host = https://adb-1112.12.azuredatabricks.net/ + token = foobar + `), + 0755) + require.NoError(t, err) + t.Setenv("DATABRICKS_CONFIG_FILE", configFile) + t.Setenv("PATH", "/nothing") + + t.Run("Prompt if nothing is specified", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{}) + }) + + t.Run("Prompt if an account host is specified", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{ + Host: "https://accounts.azuredatabricks.net/", + AccountID: "1234", + Token: "foobar", + }) + }) + + t.Run("Prompt if no credential provider can be configured", func(t *testing.T) { + expectPrompts(t, workspacePromptFn, &config.Config{ + Host: "https://adb-1111.11.azuredatabricks.net/", + }) + }) + + t.Run("Returns if configuration is valid", func(t *testing.T) { + expectReturns(t, workspacePromptFn, &config.Config{ + Host: "https://adb-1111.11.azuredatabricks.net/", + Token: "foobar", + }) + }) + + t.Run("Returns if a valid profile is specified", func(t *testing.T) { + expectReturns(t, workspacePromptFn, &config.Config{ + Profile: "workspace-1111", + }) + }) +} diff --git a/libs/cmdio/io.go b/libs/cmdio/io.go index 9d712e351..cf405a7a4 100644 --- a/libs/cmdio/io.go +++ b/libs/cmdio/io.go @@ -205,6 +205,13 
@@ func Prompt(ctx context.Context) *promptui.Prompt { } } +func RunSelect(ctx context.Context, prompt *promptui.Select) (int, string, error) { + c := fromContext(ctx) + prompt.Stdin = io.NopCloser(c.in) + prompt.Stdout = nopWriteCloser{c.err} + return prompt.Run() +} + func (c *cmdIO) simplePrompt(label string) *promptui.Prompt { return &promptui.Prompt{ Label: label, diff --git a/libs/cmdio/testing.go b/libs/cmdio/testing.go new file mode 100644 index 000000000..43592489e --- /dev/null +++ b/libs/cmdio/testing.go @@ -0,0 +1,46 @@ +package cmdio + +import ( + "bufio" + "context" + "io" +) + +type Test struct { + Done context.CancelFunc + + Stdin *bufio.Writer + Stdout *bufio.Reader + Stderr *bufio.Reader +} + +func SetupTest(ctx context.Context) (context.Context, *Test) { + rin, win := io.Pipe() + rout, wout := io.Pipe() + rerr, werr := io.Pipe() + + cmdio := &cmdIO{ + interactive: true, + in: rin, + out: wout, + err: werr, + } + + ctx, cancel := context.WithCancel(ctx) + ctx = InContext(ctx, cmdio) + + // Wait for context to be done, so we can drain stdin and close the pipes. + go func() { + <-ctx.Done() + rin.Close() + wout.Close() + werr.Close() + }() + + return ctx, &Test{ + Done: cancel, + Stdin: bufio.NewWriter(win), + Stdout: bufio.NewReader(rout), + Stderr: bufio.NewReader(rerr), + } +} From a2775f836f2d24fa592f06954e7b60e9ea2bb698 Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Mon, 11 Sep 2023 20:03:12 +0200 Subject: [PATCH 133/139] Use interactive prompt to select resource to run if not specified (#762) ## Changes Display an interactive prompt with a list of resources to run if one isn't specified and the command is run interactively. 
## Tests Manually confirmed: * The new prompt works * Shell completion still works * Specifying a key argument still works --- bundle/run/job.go | 7 +++++++ bundle/run/keys.go | 22 +++++++++++++++------- bundle/run/pipeline.go | 7 +++++++ bundle/run/runner.go | 3 +++ cmd/bundle/run.go | 30 +++++++++++++++++++++++++----- 5 files changed, 57 insertions(+), 12 deletions(-) diff --git a/bundle/run/job.go b/bundle/run/job.go index f152a17d0..319cd1464 100644 --- a/bundle/run/job.go +++ b/bundle/run/job.go @@ -95,6 +95,13 @@ type jobRunner struct { job *resources.Job } +func (r *jobRunner) Name() string { + if r.job == nil || r.job.JobSettings == nil { + return "" + } + return r.job.JobSettings.Name +} + func isFailed(task jobs.RunTask) bool { return task.State.LifeCycleState == jobs.RunLifeCycleStateInternalError || (task.State.LifeCycleState == jobs.RunLifeCycleStateTerminated && diff --git a/bundle/run/keys.go b/bundle/run/keys.go index c8b7a2b5b..76ec50ac8 100644 --- a/bundle/run/keys.go +++ b/bundle/run/keys.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/databricks/cli/bundle" + "golang.org/x/exp/maps" ) // RunnerLookup maps identifiers to a list of workloads that match that identifier. @@ -32,18 +33,20 @@ func ResourceKeys(b *bundle.Bundle) (keyOnly RunnerLookup, keyWithType RunnerLoo return } -// ResourceCompletions returns a list of keys that unambiguously reference resources in the bundle. -func ResourceCompletions(b *bundle.Bundle) []string { - seen := make(map[string]bool) - comps := []string{} +// ResourceCompletionMap returns a map of resource keys to their respective names. +func ResourceCompletionMap(b *bundle.Bundle) map[string]string { + out := make(map[string]string) keyOnly, keyWithType := ResourceKeys(b) + // Keep track of resources we have seen by their fully qualified key. + seen := make(map[string]bool) + // First add resources that can be identified by key alone. for k, v := range keyOnly { // Invariant: len(v) >= 1. See [ResourceKeys]. 
if len(v) == 1 { seen[v[0].Key()] = true - comps = append(comps, k) + out[k] = v[0].Name() } } @@ -54,8 +57,13 @@ func ResourceCompletions(b *bundle.Bundle) []string { if ok { continue } - comps = append(comps, k) + out[k] = v[0].Name() } - return comps + return out +} + +// ResourceCompletions returns a list of keys that unambiguously reference resources in the bundle. +func ResourceCompletions(b *bundle.Bundle) []string { + return maps.Keys(ResourceCompletionMap(b)) } diff --git a/bundle/run/pipeline.go b/bundle/run/pipeline.go index 7b82c3eae..216712d30 100644 --- a/bundle/run/pipeline.go +++ b/bundle/run/pipeline.go @@ -136,6 +136,13 @@ type pipelineRunner struct { pipeline *resources.Pipeline } +func (r *pipelineRunner) Name() string { + if r.pipeline == nil || r.pipeline.PipelineSpec == nil { + return "" + } + return r.pipeline.PipelineSpec.Name +} + func (r *pipelineRunner) Run(ctx context.Context, opts *Options) (output.RunOutput, error) { var pipelineID = r.pipeline.ID diff --git a/bundle/run/runner.go b/bundle/run/runner.go index 227e12d97..7d3c2c297 100644 --- a/bundle/run/runner.go +++ b/bundle/run/runner.go @@ -21,6 +21,9 @@ type Runner interface { // This is used for showing the user hints w.r.t. disambiguation. Key() string + // Name returns the resource's name, if defined. + Name() string + // Run the underlying worklow. Run(ctx context.Context, opts *Options) (output.RunOutput, error) } diff --git a/cmd/bundle/run.go b/cmd/bundle/run.go index 28b9ae7cd..b5a60ee15 100644 --- a/cmd/bundle/run.go +++ b/cmd/bundle/run.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle/phases" "github.com/databricks/cli/bundle/run" "github.com/databricks/cli/cmd/root" + "github.com/databricks/cli/libs/cmdio" "github.com/databricks/cli/libs/flags" "github.com/spf13/cobra" ) @@ -16,9 +17,9 @@ import ( func newRunCommand() *cobra.Command { cmd := &cobra.Command{ Use: "run [flags] KEY", - Short: "Run a workload (e.g. 
a job or a pipeline)", + Short: "Run a resource (e.g. a job or a pipeline)", - Args: cobra.ExactArgs(1), + Args: cobra.MaximumNArgs(1), PreRunE: ConfigureBundleWithVariables, } @@ -29,9 +30,10 @@ func newRunCommand() *cobra.Command { cmd.Flags().BoolVar(&noWait, "no-wait", false, "Don't wait for the run to complete.") cmd.RunE = func(cmd *cobra.Command, args []string) error { - b := bundle.Get(cmd.Context()) + ctx := cmd.Context() + b := bundle.Get(ctx) - err := bundle.Apply(cmd.Context(), b, bundle.Seq( + err := bundle.Apply(ctx, b, bundle.Seq( phases.Initialize(), terraform.Interpolate(), terraform.Write(), @@ -42,13 +44,31 @@ func newRunCommand() *cobra.Command { return err } + // If no arguments are specified, prompt the user to select something to run. + if len(args) == 0 && cmdio.IsInteractive(ctx) { + // Invert completions from KEY -> NAME, to NAME -> KEY. + inv := make(map[string]string) + for k, v := range run.ResourceCompletionMap(b) { + inv[v] = k + } + id, err := cmdio.Select(ctx, inv, "Resource to run") + if err != nil { + return err + } + args = append(args, id) + } + + if len(args) != 1 { + return fmt.Errorf("expected a KEY of the resource to run") + } + runner, err := run.Find(b, args[0]) if err != nil { return err } runOptions.NoWait = noWait - output, err := runner.Run(cmd.Context(), &runOptions) + output, err := runner.Run(ctx, &runOptions) if err != nil { return err } From 3cb74e72a85071a8a04dc20bace5ac99aa1daaed Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 12 Sep 2023 15:28:53 +0200 Subject: [PATCH 134/139] Run environment related tests in a pristine environment (#769) ## Changes If the caller running the test has one or more environment variables that are used in the test already set, they can interfere and make tests fail. ## Tests Ran tests in `./cmd/root` with Databricks related environment variables set. 
--- cmd/root/auth_test.go | 5 +++++ cmd/root/bundle_test.go | 19 +++++++++++++------ internal/testutil/env.go | 4 ++++ 3 files changed, 22 insertions(+), 6 deletions(-) diff --git a/cmd/root/auth_test.go b/cmd/root/auth_test.go index 70a52d50d..30fa9a086 100644 --- a/cmd/root/auth_test.go +++ b/cmd/root/auth_test.go @@ -7,6 +7,7 @@ import ( "testing" "time" + "github.com/databricks/cli/internal/testutil" "github.com/databricks/cli/libs/cmdio" "github.com/databricks/databricks-sdk-go/config" "github.com/stretchr/testify/assert" @@ -65,6 +66,8 @@ func expectReturns(t *testing.T, fn promptFn, config *config.Config) { } func TestAccountClientOrPrompt(t *testing.T) { + testutil.CleanupEnvironment(t) + dir := t.TempDir() configFile := filepath.Join(dir, ".databrickscfg") err := os.WriteFile( @@ -127,6 +130,8 @@ func TestAccountClientOrPrompt(t *testing.T) { } func TestWorkspaceClientOrPrompt(t *testing.T) { + testutil.CleanupEnvironment(t) + dir := t.TempDir() configFile := filepath.Join(dir, ".databrickscfg") err := os.WriteFile( diff --git a/cmd/root/bundle_test.go b/cmd/root/bundle_test.go index 09b33d589..3f9641b7e 100644 --- a/cmd/root/bundle_test.go +++ b/cmd/root/bundle_test.go @@ -9,6 +9,7 @@ import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/internal/testutil" "github.com/spf13/cobra" "github.com/stretchr/testify/assert" ) @@ -56,6 +57,8 @@ func setup(t *testing.T, cmd *cobra.Command, host string) *bundle.Bundle { } func TestBundleConfigureDefault(t *testing.T) { + testutil.CleanupEnvironment(t) + cmd := emptyCommand(t) b := setup(t, cmd, "https://x.com") assert.NotPanics(t, func() { @@ -64,6 +67,8 @@ func TestBundleConfigureDefault(t *testing.T) { } func TestBundleConfigureWithMultipleMatches(t *testing.T) { + testutil.CleanupEnvironment(t) + cmd := emptyCommand(t) b := setup(t, cmd, "https://a.com") assert.Panics(t, func() { @@ -72,6 +77,8 @@ func TestBundleConfigureWithMultipleMatches(t 
*testing.T) { } func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { + testutil.CleanupEnvironment(t) + cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("NOEXIST") @@ -82,6 +89,8 @@ func TestBundleConfigureWithNonExistentProfileFlag(t *testing.T) { } func TestBundleConfigureWithMismatchedProfile(t *testing.T) { + testutil.CleanupEnvironment(t) + cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") @@ -92,6 +101,8 @@ func TestBundleConfigureWithMismatchedProfile(t *testing.T) { } func TestBundleConfigureWithCorrectProfile(t *testing.T) { + testutil.CleanupEnvironment(t) + cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") @@ -102,10 +113,8 @@ func TestBundleConfigureWithCorrectProfile(t *testing.T) { } func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) t.Setenv("DATABRICKS_CONFIG_PROFILE", "PROFILE-1") - t.Cleanup(func() { - t.Setenv("DATABRICKS_CONFIG_PROFILE", "") - }) cmd := emptyCommand(t) b := setup(t, cmd, "https://x.com") @@ -115,10 +124,8 @@ func TestBundleConfigureWithMismatchedProfileEnvVariable(t *testing.T) { } func TestBundleConfigureWithProfileFlagAndEnvVariable(t *testing.T) { + testutil.CleanupEnvironment(t) t.Setenv("DATABRICKS_CONFIG_PROFILE", "NOEXIST") - t.Cleanup(func() { - t.Setenv("DATABRICKS_CONFIG_PROFILE", "") - }) cmd := emptyCommand(t) cmd.Flag("profile").Value.Set("PROFILE-1") diff --git a/internal/testutil/env.go b/internal/testutil/env.go index 05ffaf002..11a610189 100644 --- a/internal/testutil/env.go +++ b/internal/testutil/env.go @@ -2,6 +2,7 @@ package testutil import ( "os" + "runtime" "strings" "testing" ) @@ -30,4 +31,7 @@ func CleanupEnvironment(t *testing.T) { // because of isolation; the environment is scoped to the process. 
t.Setenv("PATH", path) t.Setenv("HOME", pwd) + if runtime.GOOS == "windows" { + t.Setenv("USERPROFILE", pwd) + } } From 21ff71ceea0dc1b03747e11ad73fa77cf4ff18e1 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Tue, 12 Sep 2023 15:38:43 +0200 Subject: [PATCH 135/139] Add documentation link bundle command group description (#770) Help output: ``` shreyas.goenka@THW32HFW6T ~ % cli bundle -h Databricks Asset Bundles. Documentation URL: https://docs.databricks.com/en/dev-tools/bundles. Usage: databricks bundle [command] ``` --------- Co-authored-by: Pieter Noordhuis --- cmd/bundle/bundle.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/bundle/bundle.go b/cmd/bundle/bundle.go index c933ec9c3..d8382d172 100644 --- a/cmd/bundle/bundle.go +++ b/cmd/bundle/bundle.go @@ -7,7 +7,7 @@ import ( func New() *cobra.Command { cmd := &cobra.Command{ Use: "bundle", - Short: "Databricks Asset Bundles", + Short: "Databricks Asset Bundles\n\nOnline documentation: https://docs.databricks.com/en/dev-tools/bundles", } initVariableFlag(cmd) From 96d807fb858ef0a413497ea32c86d117f4f7d91d Mon Sep 17 00:00:00 2001 From: Pieter Noordhuis Date: Tue, 12 Sep 2023 16:35:36 +0200 Subject: [PATCH 136/139] Release v0.205.0 (#771) This release marks the public preview phase of Databricks Asset Bundles. For more information, please refer to our online documentation at https://docs.databricks.com/en/dev-tools/bundles/. CLI: * Prompt once for a client profile ([#727](https://github.com/databricks/cli/pull/727)). Bundles: * Use clearer error message when no interpolation value is found. ([#764](https://github.com/databricks/cli/pull/764)). * Use interactive prompt to select resource to run if not specified ([#762](https://github.com/databricks/cli/pull/762)). * Add documentation link bundle command group description ([#770](https://github.com/databricks/cli/pull/770)). 
--- CHANGELOG.md | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index ba0dbcdcf..867e086be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Version changelog +## 0.205.0 + +This release marks the public preview phase of Databricks Asset Bundles. + +For more information, please refer to our online documentation at +https://docs.databricks.com/en/dev-tools/bundles/. + +CLI: + * Prompt once for a client profile ([#727](https://github.com/databricks/cli/pull/727)). + +Bundles: + * Use clearer error message when no interpolation value is found. ([#764](https://github.com/databricks/cli/pull/764)). + * Use interactive prompt to select resource to run if not specified ([#762](https://github.com/databricks/cli/pull/762)). + * Add documentation link bundle command group description ([#770](https://github.com/databricks/cli/pull/770)). + + ## 0.204.1 Bundles: From be55310cc9640875c7e30ecc114193c439581f0d Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Wed, 13 Sep 2023 19:57:31 +0200 Subject: [PATCH 137/139] Use enums for default python template (#765) ## Changes This PR changes schema to use the enum type for the default template yes/no questions. 
## Tests Manually --- .../default-python/databricks_template_schema.json | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/libs/template/templates/default-python/databricks_template_schema.json b/libs/template/templates/default-python/databricks_template_schema.json index 22c65f309..db8adcce1 100644 --- a/libs/template/templates/default-python/databricks_template_schema.json +++ b/libs/template/templates/default-python/databricks_template_schema.json @@ -7,26 +7,23 @@ "order": 1 }, "include_notebook": { - "todo": "use an enum here, see https://github.com/databricks/cli/pull/668", "type": "string", "default": "yes", - "pattern": "^(yes|no)$", + "enum": ["yes", "no"], "description": "Include a stub (sample) notebook in 'my_project/src'", "order": 2 }, "include_dlt": { - "todo": "use an enum here, see https://github.com/databricks/cli/pull/668", "type": "string", "default": "yes", - "pattern": "^(yes|no)$", + "enum": ["yes", "no"], "description": "Include a stub (sample) DLT pipeline in 'my_project/src'", "order": 3 }, "include_python": { - "todo": "use an enum here, see https://github.com/databricks/cli/pull/668", "type": "string", "default": "yes", - "pattern": "^(yes|no)$", + "enum": ["yes", "no"], "description": "Include a stub (sample) Python package 'my_project/src'", "order": 4 } From fe32c46dc88383e8bc14ddd40339052a0948b944 Mon Sep 17 00:00:00 2001 From: shreyas-goenka <88374338+shreyas-goenka@users.noreply.github.com> Date: Thu, 14 Sep 2023 00:50:37 +0200 Subject: [PATCH 138/139] Make bundle deploy work if no resources are defined (#767) ## Changes This PR sets "resource" to nil in the terraform representation if no resources are defined in the bundle configuration. This solves two problems: 1. Makes bundle deploy work without any resources specified. 2. Previously if a `resources` block was removed after a deployment, that would fail with an error. Now the resources would get destroyed as expected. 
Also removes `TerraformHasNoResources` which is no longer needed. ## Tests New e2e tests. --- bundle/bundle.go | 4 -- bundle/deploy/terraform/apply.go | 4 -- bundle/deploy/terraform/convert.go | 10 +++- bundle/deploy/terraform/convert_test.go | 22 ++++---- bundle/deploy/terraform/write.go | 3 +- .../databricks_template_schema.json | 8 +++ .../template/databricks.yml.tmpl | 8 +++ .../template/foo.py | 1 + .../template/resources.yml.tmpl | 7 +++ .../bundles/empty_bundle/databricks.yml | 2 + .../deploy_then_remove_resources_test.go | 55 +++++++++++++++++++ internal/bundle/empty_bundle_test.go | 37 +++++++++++++ 12 files changed, 138 insertions(+), 23 deletions(-) create mode 100644 internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json create mode 100644 internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl create mode 100644 internal/bundle/bundles/deploy_then_remove_resources/template/foo.py create mode 100644 internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl create mode 100644 internal/bundle/bundles/empty_bundle/databricks.yml create mode 100644 internal/bundle/deploy_then_remove_resources_test.go create mode 100644 internal/bundle/empty_bundle_test.go diff --git a/bundle/bundle.go b/bundle/bundle.go index 4fc605398..61bf1ffe4 100644 --- a/bundle/bundle.go +++ b/bundle/bundle.go @@ -38,10 +38,6 @@ type Bundle struct { // Stores an initialized copy of this bundle's Terraform wrapper. Terraform *tfexec.Terraform - // Indicates that the Terraform definition based on this bundle is empty, - // i.e. that it would deploy no resources. - TerraformHasNoResources bool - // Stores the locker responsible for acquiring/releasing a deployment lock. 
Locker *locker.Locker diff --git a/bundle/deploy/terraform/apply.go b/bundle/deploy/terraform/apply.go index 53cffbbaf..ab868f765 100644 --- a/bundle/deploy/terraform/apply.go +++ b/bundle/deploy/terraform/apply.go @@ -16,10 +16,6 @@ func (w *apply) Name() string { } func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) error { - if b.TerraformHasNoResources { - cmdio.LogString(ctx, "Note: there are no resources to deploy for this bundle") - return nil - } tf := b.Terraform if tf == nil { return fmt.Errorf("terraform not initialized") diff --git a/bundle/deploy/terraform/convert.go b/bundle/deploy/terraform/convert.go index 0956ea7bb..7d95e719d 100644 --- a/bundle/deploy/terraform/convert.go +++ b/bundle/deploy/terraform/convert.go @@ -49,7 +49,7 @@ func convPermission(ac resources.Permission) schema.ResourcePermissionsAccessCon // // NOTE: THIS IS CURRENTLY A HACK. WE NEED A BETTER WAY TO // CONVERT TO/FROM TERRAFORM COMPATIBLE FORMAT. -func BundleToTerraform(config *config.Root) (*schema.Root, bool) { +func BundleToTerraform(config *config.Root) *schema.Root { tfroot := schema.NewRoot() tfroot.Provider = schema.NewProviders() tfroot.Resource = schema.NewResources() @@ -174,7 +174,13 @@ func BundleToTerraform(config *config.Root) (*schema.Root, bool) { } } - return tfroot, noResources + // We explicitly set "resource" to nil to omit it from a JSON encoding. + // This is required because the terraform CLI requires >= 1 resources defined + // if the "resource" property is used in a .tf.json file. 
+ if noResources { + tfroot.Resource = nil + } + return tfroot } func TerraformToBundle(state *tfjson.State, config *config.Root) error { diff --git a/bundle/deploy/terraform/convert_test.go b/bundle/deploy/terraform/convert_test.go index ad6266066..b6b29f35a 100644 --- a/bundle/deploy/terraform/convert_test.go +++ b/bundle/deploy/terraform/convert_test.go @@ -51,7 +51,7 @@ func TestConvertJob(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) assert.Len(t, out.Resource.Job["my_job"].JobCluster, 1) assert.Equal(t, "https://github.com/foo/bar", out.Resource.Job["my_job"].GitSource.Url) @@ -79,7 +79,7 @@ func TestConvertJobPermissions(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["job_my_job"].JobId) assert.Len(t, out.Resource.Permissions["job_my_job"].AccessControl, 1) @@ -115,7 +115,7 @@ func TestConvertJobTaskLibraries(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.Equal(t, "my job", out.Resource.Job["my_job"].Name) require.Len(t, out.Resource.Job["my_job"].Task, 1) require.Len(t, out.Resource.Job["my_job"].Task[0].Library, 1) @@ -149,7 +149,7 @@ func TestConvertPipeline(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.Equal(t, "my pipeline", out.Resource.Pipeline["my_pipeline"].Name) assert.Len(t, out.Resource.Pipeline["my_pipeline"].Library, 2) assert.Nil(t, out.Data) @@ -173,7 +173,7 @@ func TestConvertPipelinePermissions(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["pipeline_my_pipeline"].PipelineId) assert.Len(t, out.Resource.Permissions["pipeline_my_pipeline"].AccessControl, 1) @@ -208,7 +208,7 @@ func TestConvertModel(t *testing.T) { }, } - out, _ := 
BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.Equal(t, "name", out.Resource.MlflowModel["my_model"].Name) assert.Equal(t, "description", out.Resource.MlflowModel["my_model"].Description) assert.Len(t, out.Resource.MlflowModel["my_model"].Tags, 2) @@ -237,7 +237,7 @@ func TestConvertModelPermissions(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["mlflow_model_my_model"].RegisteredModelId) assert.Len(t, out.Resource.Permissions["mlflow_model_my_model"].AccessControl, 1) @@ -261,7 +261,7 @@ func TestConvertExperiment(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.Equal(t, "name", out.Resource.MlflowExperiment["my_experiment"].Name) assert.Nil(t, out.Data) } @@ -284,7 +284,7 @@ func TestConvertExperimentPermissions(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["mlflow_experiment_my_experiment"].ExperimentId) assert.Len(t, out.Resource.Permissions["mlflow_experiment_my_experiment"].AccessControl, 1) @@ -327,7 +327,7 @@ func TestConvertModelServing(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) resource := out.Resource.ModelServing["my_model_serving_endpoint"] assert.Equal(t, "name", resource.Name) assert.Equal(t, "model_name", resource.Config.ServedModels[0].ModelName) @@ -357,7 +357,7 @@ func TestConvertModelServingPermissions(t *testing.T) { }, } - out, _ := BundleToTerraform(&config) + out := BundleToTerraform(&config) assert.NotEmpty(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].ServingEndpointId) assert.Len(t, out.Resource.Permissions["model_serving_my_model_serving_endpoint"].AccessControl, 1) diff --git a/bundle/deploy/terraform/write.go b/bundle/deploy/terraform/write.go index eca79ad21..b53f9069d 100644 --- 
a/bundle/deploy/terraform/write.go +++ b/bundle/deploy/terraform/write.go @@ -21,8 +21,7 @@ func (w *write) Apply(ctx context.Context, b *bundle.Bundle) error { return err } - root, noResources := BundleToTerraform(&b.Config) - b.TerraformHasNoResources = noResources + root := BundleToTerraform(&b.Config) f, err := os.Create(filepath.Join(dir, "bundle.tf.json")) if err != nil { return err diff --git a/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json b/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json new file mode 100644 index 000000000..cfed842cb --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/databricks_template_schema.json @@ -0,0 +1,8 @@ +{ + "properties": { + "unique_id": { + "type": "string", + "description": "Unique ID for job name" + } + } +} diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl b/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl new file mode 100644 index 000000000..c0e840c85 --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/databricks.yml.tmpl @@ -0,0 +1,8 @@ +bundle: + name: deploy-then-remove + +workspace: + root_path: "~/.bundle/{{.unique_id}}" + +include: + - "./*.yml" diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py b/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py new file mode 100644 index 000000000..11b15b1a4 --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/foo.py @@ -0,0 +1 @@ +print("hello") diff --git a/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl b/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl new file mode 100644 index 000000000..b74344e4c --- /dev/null +++ b/internal/bundle/bundles/deploy_then_remove_resources/template/resources.yml.tmpl @@ -0,0 +1,7 @@ +resources: + 
pipelines: + bar: + name: test-bundle-pipeline-{{.unique_id}} + libraries: + - notebook: + path: "./foo.py" diff --git a/internal/bundle/bundles/empty_bundle/databricks.yml b/internal/bundle/bundles/empty_bundle/databricks.yml new file mode 100644 index 000000000..efc627820 --- /dev/null +++ b/internal/bundle/bundles/empty_bundle/databricks.yml @@ -0,0 +1,2 @@ +bundle: + name: abc diff --git a/internal/bundle/deploy_then_remove_resources_test.go b/internal/bundle/deploy_then_remove_resources_test.go new file mode 100644 index 000000000..73860593c --- /dev/null +++ b/internal/bundle/deploy_then_remove_resources_test.go @@ -0,0 +1,55 @@ +package bundle + +import ( + "context" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/databricks/databricks-sdk-go" + "github.com/google/uuid" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAccBundleDeployThenRemoveResources(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + uniqueId := uuid.New().String() + bundleRoot, err := initTestTemplate(t, "deploy_then_remove_resources", map[string]any{ + "unique_id": uniqueId, + }) + require.NoError(t, err) + + // deploy pipeline + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + w, err := databricks.NewWorkspaceClient() + require.NoError(t, err) + + // assert pipeline is created + pipelineName := "test-bundle-pipeline-" + uniqueId + pipeline, err := w.Pipelines.GetByName(context.Background(), pipelineName) + require.NoError(t, err) + assert.Equal(t, pipeline.Name, pipelineName) + + // delete resources.yml + err = os.Remove(filepath.Join(bundleRoot, "resources.yml")) + require.NoError(t, err) + + // deploy again + err = deployBundle(t, bundleRoot) + require.NoError(t, err) + + // assert pipeline is deleted + _, err = w.Pipelines.GetByName(context.Background(), pipelineName) + assert.ErrorContains(t, err, "does not exist") + + t.Cleanup(func() { + err = 
destroyBundle(t, bundleRoot) + require.NoError(t, err) + }) +} diff --git a/internal/bundle/empty_bundle_test.go b/internal/bundle/empty_bundle_test.go new file mode 100644 index 000000000..9b39368f4 --- /dev/null +++ b/internal/bundle/empty_bundle_test.go @@ -0,0 +1,37 @@ +package bundle + +import ( + "fmt" + "os" + "path/filepath" + "testing" + + "github.com/databricks/cli/internal" + "github.com/google/uuid" + "github.com/stretchr/testify/require" +) + +func TestAccEmptyBundleDeploy(t *testing.T) { + env := internal.GetEnvOrSkipTest(t, "CLOUD_ENV") + t.Log(env) + + // create empty bundle + tmpDir := t.TempDir() + f, err := os.Create(filepath.Join(tmpDir, "databricks.yml")) + require.NoError(t, err) + + bundleRoot := fmt.Sprintf(`bundle: + name: %s`, uuid.New().String()) + _, err = f.WriteString(bundleRoot) + require.NoError(t, err) + f.Close() + + // deploy empty bundle + err = deployBundle(t, tmpDir) + require.NoError(t, err) + + t.Cleanup(func() { + err = destroyBundle(t, tmpDir) + require.NoError(t, err) + }) +} From 953dcb4972fe10320d4f5fa173851a86b4429083 Mon Sep 17 00:00:00 2001 From: Andrew Nester Date: Thu, 14 Sep 2023 12:14:13 +0200 Subject: [PATCH 139/139] Added support for experimental scripts section (#632) ## Changes Added support for experimental scripts section It allows execution of arbitrary bash commands during certain bundle lifecycle steps. ## Tests Example of configuration ```yaml bundle: name: wheel-task workspace: host: *** experimental: scripts: prebuild: | echo 'Prebuild 1' echo 'Prebuild 2' postbuild: "echo 'Postbuild 1' && echo 'Postbuild 2'" predeploy: | echo 'Checking go version...' go version postdeploy: | echo 'Checking python version...' 
python --version resources: jobs: test_job: name: "[${bundle.environment}] My Wheel Job" tasks: - task_key: TestTask existing_cluster_id: "***" python_wheel_task: package_name: "my_test_code" entry_point: "run" libraries: - whl: ./dist/*.whl ``` Output ```bash andrew.nester@HFW9Y94129 wheel % databricks bundle deploy artifacts.whl.AutoDetect: Detecting Python wheel project... artifacts.whl.AutoDetect: Found Python wheel project at /Users/andrew.nester/dabs/wheel 'Prebuild 1' 'Prebuild 2' artifacts.whl.Build(my_test_code): Building... artifacts.whl.Build(my_test_code): Build succeeded 'Postbuild 1' 'Postbuild 2' 'Checking go version...' go version go1.19.9 darwin/arm64 Starting upload of bundle files Uploaded bundle files at /Users/andrew.nester@databricks.com/.bundle/wheel-task/default/files! artifacts.Upload(my_test_code-0.0.0a0-py3-none-any.whl): Uploading... artifacts.Upload(my_test_code-0.0.0a0-py3-none-any.whl): Upload succeeded Starting resource deployment Resource deployment completed! 'Checking python version...' 
Python 2.7.18 ``` --- bundle/config/experimental.go | 18 +++++++ bundle/config/mutator/mutator.go | 3 ++ bundle/config/root.go | 2 + bundle/phases/build.go | 4 ++ bundle/phases/deploy.go | 4 ++ bundle/phases/initialize.go | 3 ++ bundle/scripts/scripts.go | 91 ++++++++++++++++++++++++++++++++ bundle/scripts/scripts_test.go | 32 +++++++++++ 8 files changed, 157 insertions(+) create mode 100644 bundle/config/experimental.go create mode 100644 bundle/scripts/scripts.go create mode 100644 bundle/scripts/scripts_test.go diff --git a/bundle/config/experimental.go b/bundle/config/experimental.go new file mode 100644 index 000000000..be0e7d8fe --- /dev/null +++ b/bundle/config/experimental.go @@ -0,0 +1,18 @@ +package config + +type Experimental struct { + Scripts map[ScriptHook]Command `json:"scripts,omitempty"` +} + +type Command string +type ScriptHook string + +// These hook names are subject to change and currently experimental +const ( + ScriptPreInit ScriptHook = "preinit" + ScriptPostInit ScriptHook = "postinit" + ScriptPreBuild ScriptHook = "prebuild" + ScriptPostBuild ScriptHook = "postbuild" + ScriptPreDeploy ScriptHook = "predeploy" + ScriptPostDeploy ScriptHook = "postdeploy" +) diff --git a/bundle/config/mutator/mutator.go b/bundle/config/mutator/mutator.go index ff1f96f50..aa762e8e6 100644 --- a/bundle/config/mutator/mutator.go +++ b/bundle/config/mutator/mutator.go @@ -2,10 +2,13 @@ package mutator import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/bundle/scripts" ) func DefaultMutators() []bundle.Mutator { return []bundle.Mutator{ + scripts.Execute(config.ScriptPreInit), ProcessRootIncludes(), DefineDefaultTarget(), LoadGitDetails(), diff --git a/bundle/config/root.go b/bundle/config/root.go index 0377f60a0..465d8a62e 100644 --- a/bundle/config/root.go +++ b/bundle/config/root.go @@ -84,6 +84,8 @@ type Root struct { // RunAs section allows to define an execution identity for jobs and 
pipelines runs RunAs *jobs.JobRunAs `json:"run_as,omitempty"` + + Experimental *Experimental `json:"experimental,omitempty"` } func Load(path string) (*Root, error) { diff --git a/bundle/phases/build.go b/bundle/phases/build.go index fe90c3691..760967fca 100644 --- a/bundle/phases/build.go +++ b/bundle/phases/build.go @@ -3,7 +3,9 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/interpolation" + "github.com/databricks/cli/bundle/scripts" ) // The build phase builds artifacts. @@ -11,9 +13,11 @@ func Build() bundle.Mutator { return newPhase( "build", []bundle.Mutator{ + scripts.Execute(config.ScriptPreBuild), artifacts.DetectPackages(), artifacts.InferMissingProperties(), artifacts.BuildAll(), + scripts.Execute(config.ScriptPostBuild), interpolation.Interpolate( interpolation.IncludeLookupsInPath("artifacts"), ), diff --git a/bundle/phases/deploy.go b/bundle/phases/deploy.go index 5a9a7f2fe..a8ca75186 100644 --- a/bundle/phases/deploy.go +++ b/bundle/phases/deploy.go @@ -3,17 +3,20 @@ package phases import ( "github.com/databricks/cli/bundle" "github.com/databricks/cli/bundle/artifacts" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/deploy/files" "github.com/databricks/cli/bundle/deploy/lock" "github.com/databricks/cli/bundle/deploy/terraform" "github.com/databricks/cli/bundle/libraries" "github.com/databricks/cli/bundle/python" + "github.com/databricks/cli/bundle/scripts" ) // The deploy phase deploys artifacts and resources. 
func Deploy() bundle.Mutator { deployMutator := bundle.Seq( + scripts.Execute(config.ScriptPreDeploy), lock.Acquire(), bundle.Defer( bundle.Seq( @@ -31,6 +34,7 @@ func Deploy() bundle.Mutator { ), lock.Release(lock.GoalDeploy), ), + scripts.Execute(config.ScriptPostDeploy), ) return newPhase( diff --git a/bundle/phases/initialize.go b/bundle/phases/initialize.go index 546a8478b..431fe27d4 100644 --- a/bundle/phases/initialize.go +++ b/bundle/phases/initialize.go @@ -2,10 +2,12 @@ package phases import ( "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" "github.com/databricks/cli/bundle/config/interpolation" "github.com/databricks/cli/bundle/config/mutator" "github.com/databricks/cli/bundle/config/variable" "github.com/databricks/cli/bundle/deploy/terraform" + "github.com/databricks/cli/bundle/scripts" ) // The initialize phase fills in defaults and connects to the workspace. @@ -30,6 +32,7 @@ func Initialize() bundle.Mutator { mutator.ProcessTargetMode(), mutator.TranslatePaths(), terraform.Initialize(), + scripts.Execute(config.ScriptPostInit), }, ) } diff --git a/bundle/scripts/scripts.go b/bundle/scripts/scripts.go new file mode 100644 index 000000000..1a8a471ca --- /dev/null +++ b/bundle/scripts/scripts.go @@ -0,0 +1,91 @@ +package scripts + +import ( + "bufio" + "context" + "fmt" + "io" + "os/exec" + "strings" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/databricks/cli/libs/cmdio" + "github.com/databricks/cli/libs/log" +) + +func Execute(hook config.ScriptHook) bundle.Mutator { + return &script{ + scriptHook: hook, + } +} + +type script struct { + scriptHook config.ScriptHook +} + +func (m *script) Name() string { + return fmt.Sprintf("scripts.%s", m.scriptHook) +} + +func (m *script) Apply(ctx context.Context, b *bundle.Bundle) error { + cmd, out, err := executeHook(ctx, b, m.scriptHook) + if err != nil { + return err + } + if cmd == nil { + log.Debugf(ctx, "No script defined for 
%s, skipping", m.scriptHook) + return nil + } + + cmdio.LogString(ctx, fmt.Sprintf("Executing '%s' script", m.scriptHook)) + + reader := bufio.NewReader(out) + line, err := reader.ReadString('\n') + for err == nil { + cmdio.LogString(ctx, strings.TrimSpace(line)) + line, err = reader.ReadString('\n') + } + + return cmd.Wait() +} + +func executeHook(ctx context.Context, b *bundle.Bundle, hook config.ScriptHook) (*exec.Cmd, io.Reader, error) { + command := getCommmand(b, hook) + if command == "" { + return nil, nil, nil + } + + interpreter, err := findInterpreter() + if err != nil { + return nil, nil, err + } + + cmd := exec.CommandContext(ctx, interpreter, "-c", string(command)) + cmd.Dir = b.Config.Path + + outPipe, err := cmd.StdoutPipe() + if err != nil { + return nil, nil, err + } + + errPipe, err := cmd.StderrPipe() + if err != nil { + return nil, nil, err + } + + return cmd, io.MultiReader(outPipe, errPipe), cmd.Start() +} + +func getCommmand(b *bundle.Bundle, hook config.ScriptHook) config.Command { + if b.Config.Experimental == nil || b.Config.Experimental.Scripts == nil { + return "" + } + + return b.Config.Experimental.Scripts[hook] +} + +func findInterpreter() (string, error) { + // At the moment we just return 'sh' on all platforms and use it to execute scripts + return "sh", nil +} diff --git a/bundle/scripts/scripts_test.go b/bundle/scripts/scripts_test.go new file mode 100644 index 000000000..8b7aa0d1b --- /dev/null +++ b/bundle/scripts/scripts_test.go @@ -0,0 +1,32 @@ +package scripts + +import ( + "bufio" + "context" + "strings" + "testing" + + "github.com/databricks/cli/bundle" + "github.com/databricks/cli/bundle/config" + "github.com/stretchr/testify/require" +) + +func TestExecutesHook(t *testing.T) { + b := &bundle.Bundle{ + Config: config.Root{ + Experimental: &config.Experimental{ + Scripts: map[config.ScriptHook]config.Command{ + config.ScriptPreBuild: "echo 'Hello'", + }, + }, + }, + } + _, out, err := executeHook(context.Background(), b, 
config.ScriptPreBuild) + require.NoError(t, err) + + reader := bufio.NewReader(out) + line, err := reader.ReadString('\n') + + require.NoError(t, err) + require.Equal(t, "Hello", strings.TrimSpace(line)) +}